From 329eca6044fdf376a7a89ec7a96dba7a8b884cf7 Mon Sep 17 00:00:00 2001
From: Michael Sullivan
Date: Mon, 25 Jun 2012 20:00:46 -0700
Subject: [PATCH] Make vectors uglier ([]/~). Sorry. Should be temporary. Closes #2725.

---
 src/cargo/cargo.rs | 97 ++--
 src/cargo/pgp.rs | 11 +-
 src/compiletest/compiletest.rs | 14 +-
 src/compiletest/errors.rs | 10 +-
 src/compiletest/header.rs | 12 +-
 src/compiletest/procsrv.rs | 10 +-
 src/compiletest/runtest.rs | 44 +-
 src/fuzzer/ast_match.rs | 12 +-
 src/fuzzer/cycles.rs | 12 +-
 src/fuzzer/fuzzer.rs | 54 +-
 src/fuzzer/ivec_fuzz.rs | 54 +-
 src/fuzzer/rand_util.rs | 20 +-
 src/libcore/arc.rs | 10 +-
 src/libcore/char.rs | 8 +-
 src/libcore/comm.rs | 2 +-
 src/libcore/dvec.rs | 29 +-
 src/libcore/either.rs | 46 +-
 src/libcore/extfmt.rs | 31 +-
 src/libcore/float.rs | 10 +-
 src/libcore/int-template.rs | 2 +-
 src/libcore/int-template/int.rs | 2 +-
 src/libcore/io.rs | 92 ++--
 src/libcore/iter-trait.rs | 8 +-
 src/libcore/iter.rs | 64 +--
 src/libcore/os.rs | 24 +-
 src/libcore/path.rs | 24 +-
 src/libcore/ptr.rs | 6 +-
 src/libcore/rand.rs | 61 +--
 src/libcore/result.rs | 21 +-
 src/libcore/run.rs | 50 +-
 src/libcore/str.rs | 253 ++++-----
 src/libcore/to_str.rs | 13 +-
 src/libcore/uint-template.rs | 2 +-
 src/libcore/unsafe.rs | 4 +-
 src/libcore/vec.rs | 490 +++++++++---------
 src/libstd/arena.rs | 4 +-
 src/libstd/bitv.rs | 56 +-
 src/libstd/deque.rs | 6 +-
 src/libstd/ebml.rs | 12 +-
 src/libstd/getopts.rs | 217 ++++----
 src/libstd/json.rs | 70 +--
 src/libstd/list.rs | 24 +-
 src/libstd/map.rs | 24 +-
 src/libstd/md4.rs | 10 +-
 src/libstd/net_tcp.rs | 48 +-
 src/libstd/par.rs | 25 +-
 src/libstd/rope.rs | 16 +-
 src/libstd/serialization.rs | 8 +-
 src/libstd/sha1.rs | 40 +-
 src/libstd/sort.rs | 91 ++--
 src/libstd/term.rs | 10 +-
 src/libstd/test.rs | 38 +-
 src/libstd/time.rs | 36 +-
 src/libstd/timer.rs | 2 +-
 src/libstd/uv_ll.rs | 16 +-
 src/libsyntax/ast.rs | 119 ++---
 src/libsyntax/ast_map.rs | 14 +-
 src/libsyntax/ast_util.rs | 31 +-
 src/libsyntax/attr.rs | 58 +--
 src/libsyntax/codemap.rs | 12 +-
 src/libsyntax/ext/auto_serialize.rs | 181 +++---
 src/libsyntax/ext/base.rs | 14 +-
 src/libsyntax/ext/build.rs | 22 +-
 src/libsyntax/ext/concat_idents.rs | 4 +-
 src/libsyntax/ext/earley_parser.rs | 24 +-
 src/libsyntax/ext/expand.rs | 4 +-
 src/libsyntax/ext/fmt.rs | 19 +-
 src/libsyntax/ext/log_syntax.rs | 3 +-
 src/libsyntax/ext/qquote.rs | 49 +-
 src/libsyntax/ext/simplext.rs | 68 +--
 src/libsyntax/fold.rs | 17 +-
 src/libsyntax/parse.rs | 5 +-
 src/libsyntax/parse/attr.rs | 29 +-
 src/libsyntax/parse/comments.rs | 36 +-
 src/libsyntax/parse/common.rs | 22 +-
 src/libsyntax/parse/eval.rs | 32 +-
 src/libsyntax/parse/lexer.rs | 4 +-
 src/libsyntax/parse/parser.rs | 251 ++++-----
 src/libsyntax/parse/token.rs | 4 +-
 src/libsyntax/print/pp.rs | 32 +-
 src/libsyntax/print/pprust.rs | 75 +--
 src/libsyntax/visit.rs | 30 +-
 src/rustc/back/link.rs | 52 +-
 src/rustc/back/rpath.rs | 48 +-
 src/rustc/back/target_strs.rs | 2 +-
 src/rustc/back/upcall.rs | 44 +-
 src/rustc/back/x86.rs | 2 +-
 src/rustc/back/x86_64.rs | 2 +-
 src/rustc/driver/driver.rs | 20 +-
 src/rustc/driver/rustc.rs | 6 +-
 src/rustc/driver/session.rs | 28 +-
 src/rustc/front/config.rs | 6 +-
 src/rustc/front/core_inject.rs | 10 +-
 src/rustc/front/intrinsic_inject.rs | 4 +-
 src/rustc/front/test.rs | 81 +--
 src/rustc/lib/llvm.rs | 15 +-
 src/rustc/metadata/astencode.rs | 22 +-
 src/rustc/metadata/creader.rs | 22 +-
 src/rustc/metadata/csearch.rs | 29 +-
 src/rustc/metadata/cstore.rs | 34 +-
 src/rustc/metadata/decoder.rs | 128 ++---
 src/rustc/metadata/encoder.rs
| 152 +++--- src/rustc/metadata/filesearch.rs | 26 +- src/rustc/metadata/loader.rs | 32 +- src/rustc/metadata/tydecode.rs | 60 +-- src/rustc/metadata/tyencode.rs | 2 +- src/rustc/middle/astencode.rs | 12 +- src/rustc/middle/borrowck/check_loans.rs | 10 +- src/rustc/middle/borrowck/gather_loans.rs | 5 +- src/rustc/middle/capture.rs | 6 +- src/rustc/middle/check_alt.rs | 30 +- src/rustc/middle/freevars.rs | 6 +- src/rustc/middle/kind.rs | 10 +- src/rustc/middle/lint.rs | 8 +- src/rustc/middle/liveness.rs | 31 +- src/rustc/middle/pat_util.rs | 6 +- src/rustc/middle/region.rs | 8 +- src/rustc/middle/resolve.rs | 148 +++--- src/rustc/middle/trans/alt.rs | 128 ++--- src/rustc/middle/trans/base.rs | 379 +++++++------- src/rustc/middle/trans/build.rs | 30 +- src/rustc/middle/trans/closure.rs | 114 ++-- src/rustc/middle/trans/common.rs | 88 ++-- src/rustc/middle/trans/debuginfo.rs | 50 +- src/rustc/middle/trans/impl.rs | 31 +- src/rustc/middle/trans/native.rs | 150 +++--- src/rustc/middle/trans/reachable.rs | 2 +- src/rustc/middle/trans/reflect.rs | 68 +-- src/rustc/middle/trans/shape.rs | 149 +++--- src/rustc/middle/trans/tvec.rs | 57 +- src/rustc/middle/trans/type_of.rs | 34 +- src/rustc/middle/trans/type_use.rs | 4 +- src/rustc/middle/trans/uniq.rs | 4 +- src/rustc/middle/tstate/annotate.rs | 14 +- src/rustc/middle/tstate/auxiliary.rs | 137 ++--- src/rustc/middle/tstate/bitvectors.rs | 6 +- src/rustc/middle/tstate/collect_locals.rs | 12 +- .../middle/tstate/pre_post_conditions.rs | 65 +-- src/rustc/middle/tstate/states.rs | 12 +- src/rustc/middle/tstate/tritv.rs | 6 +- src/rustc/middle/ty.rs | 137 ++--- src/rustc/middle/typeck.rs | 13 +- src/rustc/middle/typeck/astconv.rs | 6 +- src/rustc/middle/typeck/check.rs | 98 ++-- src/rustc/middle/typeck/check/alt.rs | 6 +- src/rustc/middle/typeck/check/method.rs | 4 +- src/rustc/middle/typeck/check/regionmanip.rs | 4 +- src/rustc/middle/typeck/check/vtable.rs | 18 +- src/rustc/middle/typeck/check/writeback.rs | 4 +- src/rustc/middle/typeck/collect.rs | 48 +- src/rustc/middle/typeck/infer.rs | 48 +- src/rustc/util/common.rs | 6 +- src/rustc/util/ppaux.rs | 24 +- src/rustdoc/attr_parser.rs | 12 +- src/rustdoc/attr_pass.rs | 8 +- src/rustdoc/config.rs | 61 +-- src/rustdoc/demo.rs | 4 +- src/rustdoc/desc_to_brief_pass.rs | 12 +- src/rustdoc/doc.rs | 60 +-- src/rustdoc/extract.rs | 22 +- src/rustdoc/markdown_index_pass.rs | 3 +- src/rustdoc/markdown_pass.rs | 26 +- src/rustdoc/markdown_writer.rs | 6 +- src/rustdoc/page_pass.rs | 6 +- src/rustdoc/par.rs | 10 +- src/rustdoc/parse.rs | 4 +- src/rustdoc/path_pass.rs | 14 +- src/rustdoc/prune_hidden_pass.rs | 2 +- src/rustdoc/prune_unexported_pass.rs | 16 +- src/rustdoc/reexport_pass.rs | 46 +- src/rustdoc/rustdoc.rs | 18 +- src/rustdoc/sectionalize_pass.rs | 10 +- src/rustdoc/text_pass.rs | 4 +- src/rustdoc/trim_pass.rs | 2 +- src/rustdoc/tystr_pass.rs | 4 +- src/rustdoc/unindent_pass.rs | 2 +- src/test/auxiliary/cci_class_6.rs | 6 +- src/test/auxiliary/cci_iter_lib.rs | 2 +- src/test/auxiliary/cci_no_inline_lib.rs | 2 +- src/test/auxiliary/issue2378a.rs | 2 +- src/test/auxiliary/issue2378b.rs | 2 +- src/test/bench/core-uint-to-str.rs | 2 +- src/test/bench/core-vec-append.rs | 8 +- src/test/bench/graph500-bfs.rs | 18 +- src/test/bench/msgsend-ring-new.rs | 6 +- src/test/bench/msgsend-ring.rs | 6 +- src/test/bench/msgsend.rs | 6 +- src/test/bench/shootout-ackermann.rs | 2 +- src/test/bench/shootout-binarytrees.rs | 2 +- src/test/bench/shootout-fannkuchredux.rs | 2 +- src/test/bench/shootout-fasta.rs | 24 +- 
src/test/bench/shootout-fibo.rs | 2 +- src/test/bench/shootout-k-nucleotide.rs | 34 +- src/test/bench/shootout-mandelbrot.rs | 10 +- src/test/bench/shootout-nbody.rs | 14 +- src/test/bench/shootout-pfib.rs | 10 +- src/test/bench/shootout-spectralnorm.rs | 8 +- src/test/bench/shootout-threadring.rs | 2 +- src/test/bench/std-smallintmap.rs | 2 +- src/test/bench/sudoku.rs | 8 +- src/test/bench/task-perf-alloc-unwind.rs | 6 +- src/test/bench/task-perf-one-million.rs | 6 +- src/test/bench/task-perf-spawnalot.rs | 2 +- .../bench/task-perf-word-count-generic.rs | 14 +- src/test/bench/task-perf-word-count.rs | 16 +- src/test/compile-fail/ambig_impl_unify.rs | 6 +- src/test/compile-fail/assign-super.rs | 4 +- src/test/compile-fail/attr-bad-meta.rs | 2 +- src/test/compile-fail/bad-expr-path.rs | 2 +- src/test/compile-fail/bad-expr-path2.rs | 2 +- src/test/compile-fail/bad-main.rs | 2 +- src/test/compile-fail/bad-module.rs | 2 +- .../block-arg-as-stmt-with-value.rs | 2 +- .../block-must-not-have-result-for.rs | 2 +- .../compile-fail/borrowck-assign-comp-idx.rs | 6 +- .../compile-fail/borrowck-binding-mutbl.rs | 4 +- .../compile-fail/borrowck-loan-vec-content.rs | 6 +- .../borrowck-mut-vec-as-imm-slice-bad.rs | 4 +- .../compile-fail/empty-vec-trailing-comma.rs | 2 +- src/test/compile-fail/fail-type-err.rs | 2 +- src/test/compile-fail/import.rs | 2 +- src/test/compile-fail/import2.rs | 2 +- src/test/compile-fail/import3.rs | 2 +- src/test/compile-fail/import4.rs | 2 +- .../infinite-vec-type-recursion.rs | 4 +- src/test/compile-fail/issue-1655.rs | 2 +- src/test/compile-fail/issue-2149.rs | 4 +- src/test/compile-fail/issue-2150.rs | 2 +- src/test/compile-fail/issue-2509-a.rs | 2 +- src/test/compile-fail/liveness-issue-2163.rs | 4 +- .../liveness-use-in-index-lvalue.rs | 2 +- src/test/compile-fail/lub-in-args.rs | 4 +- .../compile-fail/mutable-huh-variance-box.rs | 6 +- .../compile-fail/mutable-huh-variance-deep.rs | 4 +- .../compile-fail/mutable-huh-variance-ptr.rs | 8 +- .../compile-fail/mutable-huh-variance-rec.rs | 6 +- .../mutable-huh-variance-unique.rs | 6 +- .../compile-fail/mutable-huh-variance-vec1.rs | 6 +- .../compile-fail/mutable-huh-variance-vec2.rs | 6 +- .../compile-fail/mutable-huh-variance-vec3.rs | 4 +- .../compile-fail/mutable-huh-variance-vec4.rs | 18 +- .../compile-fail/mutable-huh-vec-assign.rs | 4 +- src/test/compile-fail/nested-ty-params.rs | 2 +- src/test/compile-fail/no-capture-arc.rs | 2 +- src/test/compile-fail/no-reuse-move-arc.rs | 2 +- src/test/compile-fail/non-const.rs | 4 +- src/test/compile-fail/non-copyable-void.rs | 2 +- src/test/compile-fail/nonsense-constraints.rs | 6 +- src/test/compile-fail/pattern-tyvar-2.rs | 2 +- src/test/compile-fail/pattern-tyvar.rs | 2 +- src/test/compile-fail/qquote-1.rs | 2 +- src/test/compile-fail/qquote-2.rs | 2 +- .../regions-escape-loop-via-vec.rs | 4 +- src/test/compile-fail/seq-args.rs | 2 +- .../tag-that-dare-not-speak-its-name.rs | 2 +- .../tstate-unsat-in-called-fn-expr.rs | 2 +- .../compile-fail/tstate-unsat-in-fn-expr.rs | 2 +- src/test/compile-fail/unique-vec-res.rs | 6 +- src/test/compile-fail/vec-add.rs | 50 +- src/test/compile-fail/vec-concat-bug.rs | 6 +- src/test/compile-fail/vec-field.rs | 2 +- src/test/compile-fail/vec-res-add.rs | 4 +- src/test/compile-fail/vector-no-ann.rs | 2 +- src/test/pretty/blank-lines.rs | 4 +- src/test/pretty/block-disambig.rs | 4 +- src/test/pretty/vec-comments.pp | 8 +- src/test/pretty/vec-comments.rs | 8 +- src/test/pretty/vec-type.pp | 4 +- src/test/pretty/vec-type.rs | 4 +- 
.../bug-2470-bounds-check-overflow-2.rs | 2 +- .../bug-2470-bounds-check-overflow-3.rs | 4 +- .../bug-2470-bounds-check-overflow.rs | 2 +- src/test/run-fail/unwind-box-vec.rs | 2 +- src/test/run-fail/unwind-interleaved.rs | 4 +- src/test/run-fail/unwind-misc-1.rs | 6 +- src/test/run-fail/unwind-partial-box.rs | 2 +- src/test/run-fail/unwind-partial-unique.rs | 2 +- src/test/run-fail/unwind-partial-vec.rs | 4 +- src/test/run-fail/unwind-rec.rs | 2 +- src/test/run-fail/unwind-rec2.rs | 6 +- src/test/run-fail/unwind-tup.rs | 2 +- src/test/run-fail/unwind-tup2.rs | 6 +- src/test/run-fail/vec-overrun.rs | 2 +- src/test/run-fail/vec-underrun.rs | 2 +- src/test/run-fail/zip-different-lengths.rs | 8 +- src/test/run-pass-fulldeps/issue-1926.rs | 2 +- src/test/run-pass-fulldeps/qquote.rs | 2 +- .../run-pass/alloca-from-derived-tydesc.rs | 6 +- src/test/run-pass/alt-join.rs | 4 +- src/test/run-pass/argv.rs | 4 +- src/test/run-pass/auto-loop.rs | 2 +- src/test/run-pass/auto_serialize.rs | 8 +- src/test/run-pass/autobind.rs | 4 +- .../block-arg-can-be-followed-by-binop.rs | 2 +- .../block-arg-can-be-followed-by-block-arg.rs | 2 +- .../block-arg-can-be-followed-by-call.rs | 2 +- src/test/run-pass/block-arg-in-parentheses.rs | 16 +- src/test/run-pass/block-arg.rs | 2 +- src/test/run-pass/block-iter-1.rs | 4 +- src/test/run-pass/block-iter-2.rs | 4 +- src/test/run-pass/block-vec-map2.rs | 6 +- .../run-pass/borrowck-mut-vec-as-imm-slice.rs | 4 +- src/test/run-pass/break.rs | 4 +- src/test/run-pass/cci_iter_exe.rs | 2 +- src/test/run-pass/cci_no_inline_exe.rs | 2 +- .../run-pass/class-iface-bounded-param.rs | 2 +- .../class-implements-multiple-ifaces.rs | 10 +- .../class-poly-methods-cross-crate.rs | 4 +- src/test/run-pass/class-poly-methods.rs | 10 +- src/test/run-pass/const-bound.rs | 2 +- src/test/run-pass/cycle-collection4.rs | 4 +- src/test/run-pass/deep-vector.rs | 2 +- src/test/run-pass/deep-vector2.rs | 2 +- src/test/run-pass/dvec-test.rs | 10 +- src/test/run-pass/empty-mutable-vec.rs | 2 +- src/test/run-pass/expr-alt-fail.rs | 2 +- src/test/run-pass/expr-fn.rs | 2 +- src/test/run-pass/for-destruct.rs | 2 +- src/test/run-pass/for-loop-fail.rs | 2 +- src/test/run-pass/foreach-nested.rs | 2 +- src/test/run-pass/generic-ivec-leak.rs | 2 +- src/test/run-pass/generic-ivec.rs | 2 +- src/test/run-pass/hashmap-memory.rs | 6 +- src/test/run-pass/iface-generic.rs | 14 +- src/test/run-pass/iface-to-str.rs | 8 +- src/test/run-pass/impl-variance.rs | 8 +- src/test/run-pass/import-glob-crate.rs | 4 +- src/test/run-pass/import-in-block.rs | 4 +- src/test/run-pass/import4.rs | 2 +- src/test/run-pass/import5.rs | 2 +- src/test/run-pass/import7.rs | 2 +- src/test/run-pass/infer-fn-tail-expr.rs | 2 +- src/test/run-pass/integral-indexing.rs | 2 +- src/test/run-pass/issue-1821.rs | 2 +- src/test/run-pass/issue-1989.rs | 2 +- src/test/run-pass/issue-2101.rs | 2 +- src/test/run-pass/issue-2502.rs | 6 +- src/test/run-pass/issue-2611.rs | 2 +- src/test/run-pass/issue-687.rs | 16 +- src/test/run-pass/iter-all.rs | 6 +- src/test/run-pass/iter-any.rs | 6 +- src/test/run-pass/iter-contains.rs | 10 +- src/test/run-pass/iter-count.rs | 8 +- src/test/run-pass/iter-eachi.rs | 2 +- src/test/run-pass/iter-filter-to-vec.rs | 10 +- src/test/run-pass/iter-flat-map-to-vec.rs | 22 +- src/test/run-pass/iter-foldl.rs | 4 +- src/test/run-pass/iter-map-to-vec.rs | 10 +- src/test/run-pass/iter-min-max.rs | 8 +- src/test/run-pass/iter-to-vec.rs | 12 +- src/test/run-pass/ivec-add.rs | 4 +- src/test/run-pass/ivec-pass-by-value.rs | 4 +- 
src/test/run-pass/ivec-tag.rs | 8 +- src/test/run-pass/lambda-infer-unresolved.rs | 4 +- src/test/run-pass/linear-for-loop.rs | 2 +- src/test/run-pass/liveness-move-in-loop.rs | 4 +- .../log-knows-the-names-of-variants-in-std.rs | 2 +- src/test/run-pass/log-linearized.rs | 4 +- src/test/run-pass/log-str.rs | 2 +- src/test/run-pass/loop-scope.rs | 2 +- src/test/run-pass/main-ivec.rs | 2 +- src/test/run-pass/maybe-mutable.rs | 6 +- src/test/run-pass/mod-view-items.rs | 2 +- src/test/run-pass/monad.rs | 6 +- src/test/run-pass/morestack6.rs | 2 +- src/test/run-pass/move-arg-2-unique.rs | 4 +- src/test/run-pass/move-arg-2.rs | 4 +- src/test/run-pass/mutable-alias-vec.rs | 4 +- .../run-pass/mutable-huh-variance-vec1.rs | 4 +- .../run-pass/mutable-huh-variance-vec2.rs | 4 +- src/test/run-pass/mutable-vec-drop.rs | 4 +- src/test/run-pass/native-fn-linkname.rs | 2 +- src/test/run-pass/native2.rs | 2 +- src/test/run-pass/newtype-polymorphic.rs | 6 +- .../run-pass/operator-overloading-leaks.rs | 24 +- src/test/run-pass/option-ext.rs | 2 +- src/test/run-pass/path.rs | 2 +- src/test/run-pass/pure-sum.rs | 8 +- src/test/run-pass/rcvr-borrowed-to-slice.rs | 6 +- src/test/run-pass/reflect-visit-data.rs | 12 +- src/test/run-pass/reflect-visit-type.rs | 6 +- src/test/run-pass/regions-borrow-evec-uniq.rs | 2 +- src/test/run-pass/ret-break-cont-in-block.rs | 22 +- src/test/run-pass/sendfn-deep-copy.rs | 2 +- src/test/run-pass/seq-compare.rs | 18 +- src/test/run-pass/shadow.rs | 8 +- .../run-pass/shape_intrinsic_tag_then_rec.rs | 2 +- src/test/run-pass/size-and-align.rs | 4 +- src/test/run-pass/static-impl.rs | 14 +- src/test/run-pass/swap-2.rs | 2 +- src/test/run-pass/tag-in-block.rs | 2 +- src/test/run-pass/task-comm-16.rs | 2 +- src/test/run-pass/task-comm-3.rs | 4 +- src/test/run-pass/task-comm.rs | 8 +- src/test/run-pass/task-killjoin-rsrc.rs | 4 +- src/test/run-pass/type-param.rs | 2 +- src/test/run-pass/type-params-in-for-each.rs | 4 +- src/test/run-pass/type-ptr.rs | 2 +- src/test/run-pass/unique-assign-generic.rs | 4 +- src/test/run-pass/unique-autoderef-index.rs | 2 +- src/test/run-pass/unique-create.rs | 2 +- src/test/run-pass/unique-drop-complex.rs | 2 +- src/test/run-pass/unique-in-vec-copy.rs | 2 +- src/test/run-pass/unique-in-vec.rs | 4 +- src/test/run-pass/utf8_chars.rs | 8 +- src/test/run-pass/vec-concat.rs | 6 +- src/test/run-pass/vec-drop.rs | 4 +- src/test/run-pass/vec-growth.rs | 10 +- src/test/run-pass/vec-ivec-deadlock.rs | 2 +- src/test/run-pass/vec-late-init.rs | 4 +- src/test/run-pass/vec-push.rs | 2 +- src/test/run-pass/vec-self-append.rs | 6 +- src/test/run-pass/vec-slice.rs | 2 +- src/test/run-pass/vec-trailing-comma.rs | 8 +- src/test/run-pass/vec.rs | 2 +- src/test/run-pass/vector-no-ann-2.rs | 2 +- src/test/run-pass/while-with-break.rs | 4 +- src/test/run-pass/zip-same-length.rs | 8 +- 418 files changed, 4123 insertions(+), 4034 deletions(-) diff --git a/src/cargo/cargo.rs b/src/cargo/cargo.rs index a33f05a95f6..6f89e0a8fdf 100644 --- a/src/cargo/cargo.rs +++ b/src/cargo/cargo.rs @@ -21,15 +21,15 @@ type package = { method: str, description: str, ref: option, - tags: [str], - versions: [(str, str)] + tags: [str]/~, + versions: [(str, str)]/~ }; type local_package = { name: str, metaname: str, version: str, - files: [str] + files: [str]/~ }; type source = @{ @@ -38,7 +38,7 @@ type source = @{ mut method: str, mut key: option, mut keyfp: option, - mut packages: [mut package] + mut packages: [mut package]/~ }; type cargo = { @@ -62,21 +62,21 @@ type crate = { desc: option, sigs: 
option, crate_type: option, - deps: [str] + deps: [str]/~ }; type options = { test: bool, mode: mode, - free: [str], + free: [str]/~, help: bool, }; enum mode { system_mode, user_mode, local_mode } -fn opts() -> [getopts::opt] { +fn opts() -> [getopts::opt]/~ { [optflag("g"), optflag("G"), optflag("test"), - optflag("h"), optflag("help")] + optflag("h"), optflag("help")]/~ } fn info(msg: str) { @@ -216,7 +216,7 @@ fn assume_source_method(url: str) -> str { "curl" } -fn load_link(mis: [@ast::meta_item]) -> (option, +fn load_link(mis: [@ast::meta_item]/~) -> (option, option, option) { let mut name = none; @@ -240,7 +240,7 @@ fn load_link(mis: [@ast::meta_item]) -> (option, fn load_crate(filename: str) -> option { let sess = parse::new_parse_sess(none); - let c = parse::parse_crate_from_crate_file(filename, [], sess); + let c = parse::parse_crate_from_crate_file(filename, []/~, sess); let mut name = none; let mut vers = none; @@ -275,7 +275,7 @@ fn load_crate(filename: str) -> option { } type env = @{ - mut deps: [str] + mut deps: [str]/~ }; fn goto_view_item(e: env, i: @ast::view_item) { @@ -283,7 +283,7 @@ fn load_crate(filename: str) -> option { ast::view_item_use(ident, metas, id) { let name_items = attr::find_meta_items_by_name(metas, "name"); let m = if name_items.is_empty() { - metas + [attr::mk_name_value_item_str(@"name", *ident)] + metas + [attr::mk_name_value_item_str(@"name", *ident)]/~ } else { metas }; @@ -326,7 +326,7 @@ fn load_crate(filename: str) -> option { } let e = @{ - mut deps: [] + mut deps: []/~ }; let v = visit::mk_simple_visitor(@{ visit_view_item: {|a|goto_view_item(e, a)}, @@ -424,7 +424,7 @@ fn parse_source(name: str, j: json::json) -> source { mut method: method, mut key: key, mut keyfp: keyfp, - mut packages: [mut] }; + mut packages: [mut]/~ }; } _ { fail "needed dict value in source"; } }; @@ -498,7 +498,7 @@ fn load_one_source_package(src: source, p: map::hashmap) { _ { none } }; - let mut tags = []; + let mut tags = []/~; alt p.find("tags") { some(json::list(js)) { for (*js).each {|j| @@ -528,7 +528,7 @@ fn load_one_source_package(src: source, p: map::hashmap) { description: description, ref: ref, tags: tags, - versions: [] + versions: []/~ }; alt src.packages.position({ |pkg| pkg.uuid == uuid }) { @@ -595,7 +595,7 @@ fn load_source_packages(c: cargo, src: source) { }; } -fn build_cargo_options(argv: [str]) -> options { +fn build_cargo_options(argv: [str]/~) -> options { let match = alt getopts::getopts(argv, opts()) { result::ok(m) { m } result::err(f) { @@ -699,19 +699,19 @@ fn for_each_package(c: cargo, b: fn(source, package)) { fn run_programs(buildpath: str) { let newv = os::list_dir_path(buildpath); for newv.each {|ct| - run::run_program(ct, []); + run::run_program(ct, []/~); } } // Runs rustc in with the given flags // and returns fn run_in_buildpath(what: str, path: str, subdir: str, cf: str, - extra_flags: [str]) -> option { + extra_flags: [str]/~) -> option { let buildpath = path::connect(path, subdir); need_dir(buildpath); #debug("%s: %s -> %s", what, cf, buildpath); let p = run::program_output(rustc_sysroot(), - ["--out-dir", buildpath, cf] + extra_flags); + ["--out-dir", buildpath, cf]/~ + extra_flags); if p.status != 0 { error(#fmt["rustc failed: %d\n%s\n%s", p.status, p.err, p.out]); ret none; @@ -721,7 +721,7 @@ fn run_in_buildpath(what: str, path: str, subdir: str, cf: str, fn test_one_crate(_c: cargo, path: str, cf: str) { let buildpath = alt run_in_buildpath("testing", path, "/test", cf, - [ "--test"]) { + [ "--test"]/~) { none { ret; } 
some(bp) { bp } }; @@ -730,7 +730,7 @@ fn test_one_crate(_c: cargo, path: str, cf: str) { fn install_one_crate(c: cargo, path: str, cf: str) { let buildpath = alt run_in_buildpath("installing", path, - "/build", cf, []) { + "/build", cf, []/~) { none { ret; } some(bp) { bp } }; @@ -758,7 +758,7 @@ fn install_one_crate(c: cargo, path: str, cf: str) { fn rustc_sysroot() -> str { alt os::self_exe_path() { some(path) { - let path = [path, "..", "bin", "rustc"]; + let path = [path, "..", "bin", "rustc"]/~; check vec::is_not_empty(path); let rustc = path::normalize(path::connect_many(path)); #debug(" rustc: %s", rustc); @@ -772,7 +772,7 @@ fn install_source(c: cargo, path: str) { #debug("source: %s", path); os::change_dir(path); - let mut cratefiles = []; + let mut cratefiles = []/~; for os::walk_dir(".") {|p| if str::ends_with(p, ".rc") { vec::push(cratefiles, p); @@ -811,11 +811,11 @@ fn install_source(c: cargo, path: str) { } fn install_git(c: cargo, wd: str, url: str, ref: option) { - run::program_output("git", ["clone", url, wd]); + run::program_output("git", ["clone", url, wd]/~); if option::is_some(ref) { let r = option::get(ref); os::change_dir(wd); - run::run_program("git", ["checkout", r]); + run::run_program("git", ["checkout", r]/~); } install_source(c, wd); @@ -824,18 +824,18 @@ fn install_git(c: cargo, wd: str, url: str, ref: option) { fn install_curl(c: cargo, wd: str, url: str) { let tarpath = path::connect(wd, "pkg.tar"); let p = run::program_output("curl", ["-f", "-s", "-o", - tarpath, url]); + tarpath, url]/~); if p.status != 0 { fail #fmt["fetch of %s failed: %s", url, p.err]; } run::run_program("tar", ["-x", "--strip-components=1", - "-C", wd, "-f", tarpath]); + "-C", wd, "-f", tarpath]/~); install_source(c, wd); } fn install_file(c: cargo, wd: str, path: str) { run::program_output("tar", ["-x", "--strip-components=1", - "-C", wd, "-f", path]); + "-C", wd, "-f", path]/~); install_source(c, wd); } @@ -868,7 +868,7 @@ fn cargo_suggestion(c: cargo, fallback: fn()) } fn install_uuid(c: cargo, wd: str, uuid: str) { - let mut ps = []; + let mut ps = []/~; for_each_package(c, { |s, p| if p.uuid == uuid { vec::grow(ps, 1u, (s.name, copy p)); @@ -892,7 +892,7 @@ fn install_uuid(c: cargo, wd: str, uuid: str) { } fn install_named(c: cargo, wd: str, name: str) { - let mut ps = []; + let mut ps = []/~; for_each_package(c, { |s, p| if p.name == name { vec::grow(ps, 1u, (s.name, copy p)); @@ -1082,7 +1082,7 @@ fn cmd_install(c: cargo) unsafe { if vec::len(c.opts.free) == 2u { let cwd = os::getcwd(); - let status = run::run_program("cp", ["-R", cwd, wd]); + let status = run::run_program("cp", ["-R", cwd, wd]/~); if status != 0 { fail #fmt("could not copy directory: %s", cwd); @@ -1135,7 +1135,7 @@ fn sync_one_file(c: cargo, dir: str, src: source) -> bool { alt copy src.key { some(u) { let p = run::program_output("curl", ["-f", "-s", "-o", keyfile, - u]); + u]/~); if p.status != 0 { error(#fmt["fetch for source %s (key %s) failed", name, u]); ret false; @@ -1209,7 +1209,7 @@ fn sync_one_git(c: cargo, dir: str, src: source) -> bool { } else { let p = run::program_output("git", ["reset", "--hard", - "HEAD@{1}"]); + "HEAD@{1}"]/~); if p.status != 0 { msg(name, insecure); @@ -1218,7 +1218,7 @@ fn sync_one_git(c: cargo, dir: str, src: source) -> bool { } if !os::path_exists(path::connect(dir, ".git")) { - let p = run::program_output("git", ["clone", url, dir]); + let p = run::program_output("git", ["clone", url, dir]/~); if p.status != 0 { error(#fmt["fetch for source %s (url %s) failed", name, 
url]); @@ -1231,7 +1231,7 @@ fn sync_one_git(c: cargo, dir: str, src: source) -> bool { ret false; } - let p = run::program_output("git", ["pull"]); + let p = run::program_output("git", ["pull"]/~); if p.status != 0 { error(#fmt["fetch for source %s (url %s) failed", name, url]); @@ -1244,7 +1244,7 @@ fn sync_one_git(c: cargo, dir: str, src: source) -> bool { alt copy src.key { some(u) { let p = run::program_output("curl", ["-f", "-s", "-o", keyfile, - u]); + u]/~); if p.status != 0 { error(#fmt["fetch for source %s (key %s) failed", name, u]); rollback(name, dir, false); @@ -1303,7 +1303,7 @@ fn sync_one_curl(c: cargo, dir: str, src: source) -> bool { url += "/packages.json"; } - let p = run::program_output("curl", ["-f", "-s", "-o", pkgfile, url]); + let p = run::program_output("curl", ["-f", "-s", "-o", pkgfile, url]/~); if p.status != 0 { error(#fmt["fetch for source %s (url %s) failed", name, url]); @@ -1311,7 +1311,8 @@ fn sync_one_curl(c: cargo, dir: str, src: source) -> bool { } if smart { url = src.url + "/source.json"; - let p = run::program_output("curl", ["-f", "-s", "-o", srcfile, url]); + let p = + run::program_output("curl", ["-f", "-s", "-o", srcfile, url]/~); if p.status == 0 { has_src_file = true; @@ -1321,7 +1322,7 @@ fn sync_one_curl(c: cargo, dir: str, src: source) -> bool { alt copy src.key { some(u) { let p = run::program_output("curl", ["-f", "-s", "-o", keyfile, - u]); + u]/~); if p.status != 0 { error(#fmt["fetch for source %s (key %s) failed", name, u]); ret false; @@ -1340,7 +1341,7 @@ fn sync_one_curl(c: cargo, dir: str, src: source) -> bool { } let mut p = run::program_output("curl", ["-f", "-s", "-o", - sigfile, url]); + sigfile, url]/~); if p.status != 0 { error(#fmt["fetch for source %s (sig %s) failed", name, url]); ret false; @@ -1358,7 +1359,7 @@ fn sync_one_curl(c: cargo, dir: str, src: source) -> bool { url = src.url + "/source.json.sig"; p = run::program_output("curl", ["-f", "-s", "-o", srcsigfile, - url]); + url]/~); if p.status != 0 { error(#fmt["fetch for source %s (sig %s) failed", name, url]); @@ -1422,13 +1423,15 @@ fn cmd_init(c: cargo) { let sigfile = path::connect(c.root, "sources.json.sig"); let destsrcfile = path::connect(c.root, "sources.json"); - let p = run::program_output("curl", ["-f", "-s", "-o", srcfile, srcurl]); + let p = + run::program_output("curl", ["-f", "-s", "-o", srcfile, srcurl]/~); if p.status != 0 { error(#fmt["fetch of sources.json failed: %s", p.out]); ret; } - let p = run::program_output("curl", ["-f", "-s", "-o", sigfile, sigurl]); + let p = + run::program_output("curl", ["-f", "-s", "-o", sigfile, sigurl]/~); if p.status != 0 { error(#fmt["fetch of sources.json.sig failed: %s", p.out]); ret; @@ -1530,7 +1533,7 @@ fn cmd_search(c: cargo) { fn install_to_dir(srcfile: str, destdir: str) { let newfile = path::connect(destdir, path::basename(srcfile)); - let status = run::run_program("cp", ["-r", srcfile, newfile]); + let status = run::run_program("cp", ["-r", srcfile, newfile]/~); if status == 0 { info(#fmt["installed: '%s'", newfile]); } else { @@ -1647,7 +1650,7 @@ fn cmd_sources(c: cargo) { mut method: assume_source_method(url), mut key: none, mut keyfp: none, - mut packages: [mut] + mut packages: [mut]/~ }); info(#fmt("added source: %s", name)); } @@ -1865,7 +1868,7 @@ Commands: set-method Change the method for a source."); } -fn main(argv: [str]) { +fn main(argv: [str]/~) { let o = build_cargo_options(argv); if vec::len(o.free) < 2u { diff --git a/src/cargo/pgp.rs b/src/cargo/pgp.rs index c932a5f4eda..8c0eb7f6ba2 
100644 --- a/src/cargo/pgp.rs +++ b/src/cargo/pgp.rs @@ -1,4 +1,4 @@ -fn gpg(args: [str]) -> { status: int, out: str, err: str } { +fn gpg(args: [str]/~) -> { status: int, out: str, err: str } { ret run::program_output("gpg", args); } @@ -59,7 +59,7 @@ fn signing_key_fp() -> str { } fn supported() -> bool { - let r = gpg(["--version"]); + let r = gpg(["--version"]/~); r.status == 0 } @@ -67,7 +67,7 @@ fn init(root: str) { let p = path::connect(root, "gpg"); if !os::path_is_dir(p) { os::make_dir(p, 0x1c0i32); - let p = run::start_program("gpg", ["--homedir", p, "--import"]); + let p = run::start_program("gpg", ["--homedir", p, "--import"]/~); p.input().write_str(signing_key()); let s = p.finish(); if s != 0 { @@ -78,7 +78,8 @@ fn init(root: str) { fn add(root: str, key: str) { let path = path::connect(root, "gpg"); - let p = run::program_output("gpg", ["--homedir", path, "--import", key]); + let p = + run::program_output("gpg", ["--homedir", path, "--import", key]/~); if p.status != 0 { fail "pgp add failed: " + p.out; } @@ -87,7 +88,7 @@ fn add(root: str, key: str) { fn verify(root: str, data: str, sig: str, keyfp: str) -> bool { let path = path::connect(root, "gpg"); let p = gpg(["--homedir", path, "--with-fingerprint", "--verify", sig, - data]); + data]/~); let res = "Primary key fingerprint: " + keyfp; for str::split_char(p.err, '\n').each {|line| if line == res { ret true; } diff --git a/src/compiletest/compiletest.rs b/src/compiletest/compiletest.rs index 06428a993d2..07bb35dc292 100644 --- a/src/compiletest/compiletest.rs +++ b/src/compiletest/compiletest.rs @@ -21,13 +21,13 @@ import common::mode_pretty; import common::mode; import util::logv; -fn main(args: [str]) { +fn main(args: [str]/~) { let config = parse_config(args); log_config(config); run_tests(config); } -fn parse_config(args: [str]) -> config { +fn parse_config(args: [str]/~) -> config { let opts = [getopts::reqopt("compile-lib-path"), getopts::reqopt("run-lib-path"), getopts::reqopt("rustc-path"), getopts::reqopt("src-base"), @@ -36,7 +36,7 @@ fn parse_config(args: [str]) -> config { getopts::reqopt("mode"), getopts::optflag("ignored"), getopts::optopt("runtool"), getopts::optopt("rustcflags"), getopts::optflag("verbose"), - getopts::optopt("logfile")]; + getopts::optopt("logfile")]/~; check (vec::is_not_empty(args)); let args_ = vec::tail(args); @@ -132,9 +132,9 @@ fn test_opts(config: config) -> test::test_opts { } } -fn make_tests(config: config) -> [test::test_desc] { +fn make_tests(config: config) -> [test::test_desc]/~ { #debug("making tests from %s", config.src_base); - let mut tests = []; + let mut tests = []/~; for os::list_dir_path(config.src_base).each {|file| let file = file; #debug("inspecting file %s", file); @@ -148,8 +148,8 @@ fn make_tests(config: config) -> [test::test_desc] { fn is_test(config: config, testfile: str) -> bool { // Pretty-printer does not work with .rc files yet let valid_extensions = - alt config.mode { mode_pretty { [".rs"] } _ { [".rc", ".rs"] } }; - let invalid_prefixes = [".", "#", "~"]; + alt config.mode { mode_pretty { [".rs"]/~ } _ { [".rc", ".rs"]/~ } }; + let invalid_prefixes = [".", "#", "~"]/~; let name = path::basename(testfile); let mut valid = false; diff --git a/src/compiletest/errors.rs b/src/compiletest/errors.rs index 1e04eb52109..5d1bba368d4 100644 --- a/src/compiletest/errors.rs +++ b/src/compiletest/errors.rs @@ -8,8 +8,8 @@ export expected_error; type expected_error = { line: uint, kind: str, msg: str }; // Load any test directives embedded in the file -fn 
load_errors(testfile: str) -> [expected_error] { - let mut error_patterns = []; +fn load_errors(testfile: str) -> [expected_error]/~ { + let mut error_patterns = []/~; let rdr = result::get(io::file_reader(testfile)); let mut line_num = 1u; while !rdr.eof() { @@ -20,11 +20,11 @@ fn load_errors(testfile: str) -> [expected_error] { ret error_patterns; } -fn parse_expected(line_num: uint, line: str) -> [expected_error] unsafe { +fn parse_expected(line_num: uint, line: str) -> [expected_error]/~ unsafe { let error_tag = "//!"; let mut idx; alt str::find_str(line, error_tag) { - option::none { ret []; } + option::none { ret []/~; } option::some(nn) { idx = (nn as uint) + str::len(error_tag); } } @@ -49,5 +49,5 @@ fn parse_expected(line_num: uint, line: str) -> [expected_error] unsafe { #debug("line=%u kind=%s msg=%s", line_num - adjust_line, kind, msg); - ret [{line: line_num - adjust_line, kind: kind, msg: msg}]; + ret [{line: line_num - adjust_line, kind: kind, msg: msg}]/~; } diff --git a/src/compiletest/header.rs b/src/compiletest/header.rs index 642e2073e6b..7d286211acb 100644 --- a/src/compiletest/header.rs +++ b/src/compiletest/header.rs @@ -10,23 +10,23 @@ export is_test_ignored; type test_props = { // Lines that should be expected, in order, on standard out - error_patterns: [str], + error_patterns: [str]/~, // Extra flags to pass to the compiler compile_flags: option, // If present, the name of a file that this test should match when // pretty-printed pp_exact: option, // Modules from aux directory that should be compiled - aux_builds: [str], + aux_builds: [str]/~, // Environment settings to use during execution - exec_env: [(str,str)] + exec_env: [(str,str)]/~ }; // Load any test directives embedded in the file fn load_props(testfile: str) -> test_props { - let mut error_patterns = []; - let mut aux_builds = []; - let mut exec_env = []; + let mut error_patterns = []/~; + let mut aux_builds = []/~; + let mut exec_env = []/~; let mut compile_flags = option::none; let mut pp_exact = option::none; for iter_header(testfile) {|ln| diff --git a/src/compiletest/procsrv.rs b/src/compiletest/procsrv.rs index 354d966c2cc..374ad1cda08 100644 --- a/src/compiletest/procsrv.rs +++ b/src/compiletest/procsrv.rs @@ -5,7 +5,7 @@ import libc::{c_int, pid_t}; export run; #[cfg(target_os = "win32")] -fn target_env(lib_path: str, prog: str) -> [(str,str)] { +fn target_env(lib_path: str, prog: str) -> [(str,str)]/~ { let mut env = os::env(); @@ -27,16 +27,16 @@ fn target_env(lib_path: str, prog: str) -> [(str,str)] { #[cfg(target_os = "linux")] #[cfg(target_os = "macos")] #[cfg(target_os = "freebsd")] -fn target_env(_lib_path: str, _prog: str) -> [(str,str)] { - [] +fn target_env(_lib_path: str, _prog: str) -> [(str,str)]/~ { + []/~ } // FIXME (#2659): This code is duplicated in core::run::program_output fn run(lib_path: str, prog: str, - args: [str], - env: [(str, str)], + args: [str]/~, + env: [(str, str)]/~, input: option) -> {status: int, out: str, err: str} { let pipe_in = os::pipe(); diff --git a/src/compiletest/runtest.rs b/src/compiletest/runtest.rs index 749af7d9f73..7d33843c0d3 100644 --- a/src/compiletest/runtest.rs +++ b/src/compiletest/runtest.rs @@ -92,7 +92,7 @@ fn run_pretty_test(config: config, props: test_props, testfile: str) { let rounds = alt props.pp_exact { option::some(_) { 1 } option::none { 2 } }; - let mut srcs = [result::get(io::read_whole_file_str(testfile))]; + let mut srcs = [result::get(io::read_whole_file_str(testfile))]/~; let mut round = 0; while round < rounds { @@ 
-139,12 +139,12 @@ fn run_pretty_test(config: config, props: test_props, testfile: str) { fn print_source(config: config, testfile: str, src: str) -> procres { compose_and_run(config, testfile, make_pp_args(config, testfile), - [], config.compile_lib_path, option::some(src)) + []/~, config.compile_lib_path, option::some(src)) } fn make_pp_args(config: config, _testfile: str) -> procargs { let prog = config.rustc_path; - let args = ["-", "--pretty", "normal"]; + let args = ["-", "--pretty", "normal"]/~; ret {prog: prog, args: args}; } @@ -179,7 +179,7 @@ actual:\n\ fn make_typecheck_args(config: config, testfile: str) -> procargs { let prog = config.rustc_path; let mut args = ["-", "--no-trans", "--lib", "-L", config.build_base, - "-L", aux_output_dir_name(config, testfile)]; + "-L", aux_output_dir_name(config, testfile)]/~; args += split_maybe_args(config.rustcflags); ret {prog: prog, args: args}; } @@ -227,7 +227,7 @@ fn check_error_patterns(props: test_props, } } -fn check_expected_errors(expected_errors: [errors::expected_error], +fn check_expected_errors(expected_errors: [errors::expected_error]/~, testfile: str, procres: procres) { @@ -286,13 +286,13 @@ fn check_expected_errors(expected_errors: [errors::expected_error], } } -type procargs = {prog: str, args: [str]}; +type procargs = {prog: str, args: [str]/~}; type procres = {status: int, stdout: str, stderr: str, cmdline: str}; fn compile_test(config: config, props: test_props, testfile: str) -> procres { - let link_args = ["-L", aux_output_dir_name(config, testfile)]; + let link_args = ["-L", aux_output_dir_name(config, testfile)]/~; compose_and_run_compiler( config, props, testfile, make_compile_args(config, props, link_args, @@ -319,14 +319,14 @@ fn compose_and_run_compiler( ensure_dir(aux_output_dir_name(config, testfile)); } - let extra_link_args = ["-L", aux_output_dir_name(config, testfile)]; + let extra_link_args = ["-L", aux_output_dir_name(config, testfile)]/~; vec::iter(props.aux_builds) {|rel_ab| let abs_ab = path::connect(config.aux_base, rel_ab); let aux_args = - make_compile_args(config, props, ["--lib"] + extra_link_args, + make_compile_args(config, props, ["--lib"]/~ + extra_link_args, {|a,b|make_lib_name(a, b, testfile)}, abs_ab); - let auxres = compose_and_run(config, abs_ab, aux_args, [], + let auxres = compose_and_run(config, abs_ab, aux_args, []/~, config.compile_lib_path, option::none); if auxres.status != 0 { fatal_procres( @@ -335,7 +335,7 @@ fn compose_and_run_compiler( } } - compose_and_run(config, testfile, args, [], + compose_and_run(config, testfile, args, []/~, config.compile_lib_path, input) } @@ -348,19 +348,19 @@ fn ensure_dir(path: path) { fn compose_and_run(config: config, testfile: str, procargs: procargs, - procenv: [(str, str)], + procenv: [(str, str)]/~, lib_path: str, input: option) -> procres { ret program_output(config, testfile, lib_path, procargs.prog, procargs.args, procenv, input); } -fn make_compile_args(config: config, props: test_props, extras: [str], +fn make_compile_args(config: config, props: test_props, extras: [str]/~, xform: fn(config, str) -> str, testfile: str) -> procargs { let prog = config.rustc_path; let mut args = [testfile, "-o", xform(config, testfile), - "-L", config.build_base] + extras; + "-L", config.build_base]/~ + extras; args += split_maybe_args(config.rustcflags); args += split_maybe_args(props.compile_flags); ret {prog: prog, args: args}; @@ -390,12 +390,12 @@ fn make_run_args(config: config, _props: test_props, testfile: str) -> split_maybe_args(runtool) }; - 
let args = toolargs + [make_exe_name(config, testfile)]; + let args = toolargs + [make_exe_name(config, testfile)]/~; ret {prog: args[0], args: vec::slice(args, 1u, vec::len(args))}; } -fn split_maybe_args(argstr: option) -> [str] { - fn rm_whitespace(v: [str]) -> [str] { +fn split_maybe_args(argstr: option) -> [str]/~ { + fn rm_whitespace(v: [str]/~) -> [str]/~ { fn flt(&&s: str) -> option { if !str::is_whitespace(s) { option::some(s) } else { option::none } } @@ -404,12 +404,12 @@ fn split_maybe_args(argstr: option) -> [str] { alt argstr { option::some(s) { rm_whitespace(str::split_char(s, ' ')) } - option::none { [] } + option::none { []/~ } } } fn program_output(config: config, testfile: str, lib_path: str, prog: str, - args: [str], env: [(str, str)], + args: [str]/~, env: [(str, str)]/~, input: option) -> procres { let cmdline = { @@ -429,12 +429,12 @@ fn program_output(config: config, testfile: str, lib_path: str, prog: str, #[cfg(target_os = "linux")] #[cfg(target_os = "macos")] #[cfg(target_os = "freebsd")] -fn make_cmdline(_libpath: str, prog: str, args: [str]) -> str { +fn make_cmdline(_libpath: str, prog: str, args: [str]/~) -> str { #fmt["%s %s", prog, str::connect(args, " ")] } #[cfg(target_os = "win32")] -fn make_cmdline(libpath: str, prog: str, args: [str]) -> str { +fn make_cmdline(libpath: str, prog: str, args: [str]/~) -> str { #fmt["%s %s %s", lib_path_cmd_prefix(libpath), prog, str::connect(args, " ")] } @@ -454,7 +454,7 @@ fn dump_output(config: config, testfile: str, out: str, err: str) { fn dump_output_file(config: config, testfile: str, out: str, extension: str) { let outfile = make_out_name(config, testfile, extension); let writer = result::get( - io::file_writer(outfile, [io::create, io::truncate])); + io::file_writer(outfile, [io::create, io::truncate]/~)); writer.write_str(out); } diff --git a/src/fuzzer/ast_match.rs b/src/fuzzer/ast_match.rs index 3f2bf940aa1..3eee008d376 100644 --- a/src/fuzzer/ast_match.rs +++ b/src/fuzzer/ast_match.rs @@ -1,7 +1,7 @@ use std; import vec; -fn vec_equal(v: [T], u: [T], +fn vec_equal(v: [T]/~, u: [T]/~, element_equality_test: fn@(&&T, &&T) -> bool) -> bool { let Lv = vec::len(v); @@ -20,11 +20,11 @@ pure fn builtin_equal_int(&&a: int, &&b: int) -> bool { ret a == b; } fn main() { assert (builtin_equal(5, 5)); assert (!builtin_equal(5, 4)); - assert (!vec_equal([5, 5], [5], bind builtin_equal(_, _))); - assert (!vec_equal([5, 5], [5], builtin_equal_int)); - assert (!vec_equal([5, 5], [5, 4], builtin_equal_int)); - assert (!vec_equal([5, 5], [4, 5], builtin_equal_int)); - assert (vec_equal([5, 5], [5, 5], builtin_equal_int)); + assert (!vec_equal([5, 5]/~, [5]/~, bind builtin_equal(_, _))); + assert (!vec_equal([5, 5]/~, [5]/~, builtin_equal_int)); + assert (!vec_equal([5, 5]/~, [5, 4]/~, builtin_equal_int)); + assert (!vec_equal([5, 5]/~, [4, 5]/~, builtin_equal_int)); + assert (vec_equal([5, 5]/~, [5, 5]/~, builtin_equal_int)); #error("Pass"); } diff --git a/src/fuzzer/cycles.rs b/src/fuzzer/cycles.rs index 4773a331c13..6a456d39d70 100644 --- a/src/fuzzer/cycles.rs +++ b/src/fuzzer/cycles.rs @@ -8,7 +8,7 @@ fn under(r : rand::rng, n : uint) -> uint { } // random choice from a vec -fn choice(r : rand::rng, v : [const T]) -> T { +fn choice(r : rand::rng, v : [const T]/~) -> T { assert vec::len(v) != 0u; v[under(r, vec::len(v))] } @@ -32,8 +32,8 @@ type pointy = { mut f : fn@()->(), mut g : fn~()->(), - mut m : [maybe_pointy], - mut n : [mut maybe_pointy], + mut m : [maybe_pointy]/~, + mut n : [mut maybe_pointy]/~, mut o : {x : 
int, y : maybe_pointy} }; // To add: objects; ifaces; anything type-parameterized? @@ -47,8 +47,8 @@ fn empty_pointy() -> @pointy { mut f : fn@()->(){}, mut g : fn~()->(){}, - mut m : [], - mut n : [mut], + mut m : []/~, + mut n : [mut]/~, mut o : {x : 0, y : none} } } @@ -58,7 +58,7 @@ fn nop(_x: T) { } fn test_cycles(r : rand::rng, k: uint, n: uint) { - let v : [mut @pointy] = [mut]; + let v : [mut @pointy]/~ = [mut]/~; // Create a graph with no edges range(0u, vlen) {|_i| diff --git a/src/fuzzer/fuzzer.rs b/src/fuzzer/fuzzer.rs index 7c59ceeef00..d99a40018f1 100644 --- a/src/fuzzer/fuzzer.rs +++ b/src/fuzzer/fuzzer.rs @@ -10,7 +10,7 @@ type context = { mode: test_mode }; // + rng fn write_file(filename: str, content: str) { result::get( - io::file_writer(filename, [io::create, io::truncate])) + io::file_writer(filename, [io::create, io::truncate]/~)) .write_str(content); } @@ -18,10 +18,10 @@ fn contains(haystack: str, needle: str) -> bool { str::contains(haystack, needle) } -fn find_rust_files(&files: [str], path: str) { +fn find_rust_files(&files: [str]/~, path: str) { if str::ends_with(path, ".rs") && !contains(path, "utf8") { // ignoring "utf8" tests because something is broken - files += [path]; + files += [path]/~; } else if os::path_is_dir(path) && !contains(path, "compile-fail") && !contains(path, "build") { @@ -32,7 +32,7 @@ fn find_rust_files(&files: [str], path: str) { } -fn common_exprs() -> [ast::expr] { +fn common_exprs() -> [ast::expr]/~ { fn dse(e: ast::expr_) -> ast::expr { { id: 0, node: e, span: ast_util::dummy_sp() } } @@ -54,7 +54,7 @@ fn common_exprs() -> [ast::expr] { @dse(ast::expr_lit(@dsl(ast::lit_bool(true)))))), dse(ast::expr_unary(ast::uniq(ast::m_imm), @dse(ast::expr_lit(@dsl(ast::lit_bool(true)))))) - ] + ]/~ } pure fn safe_to_steal_expr(e: @ast::expr, tm: test_mode) -> bool { @@ -116,16 +116,16 @@ fn safe_to_steal_ty(t: @ast::ty, tm: test_mode) -> bool { // Not type-parameterized: https://github.com/mozilla/rust/issues/898 (FIXED) fn stash_expr_if(c: fn@(@ast::expr, test_mode)->bool, - es: @mut [ast::expr], + es: @mut [ast::expr]/~, e: @ast::expr, tm: test_mode) { if c(e, tm) { - *es += [*e]; + *es += [*e]/~; } else {/* now my indices are wrong :( */ } } fn stash_ty_if(c: fn@(@ast::ty, test_mode)->bool, - es: @mut [ast::ty], + es: @mut [ast::ty]/~, e: @ast::ty, tm: test_mode) { if c(e, tm) { @@ -133,11 +133,11 @@ fn stash_ty_if(c: fn@(@ast::ty, test_mode)->bool, } else {/* now my indices are wrong :( */ } } -type stolen_stuff = {exprs: [ast::expr], tys: [ast::ty]}; +type stolen_stuff = {exprs: [ast::expr]/~, tys: [ast::ty]/~}; fn steal(crate: ast::crate, tm: test_mode) -> stolen_stuff { - let exprs = @mut []; - let tys = @mut []; + let exprs = @mut []/~; + let tys = @mut []/~; let v = visit::mk_simple_visitor(@{ visit_expr: {|a|stash_expr_if(safe_to_steal_expr, exprs, a, tm)}, visit_ty: {|a|stash_ty_if(safe_to_steal_ty, tys, a, tm)} @@ -248,7 +248,7 @@ fn check_variants_T( codemap: codemap::codemap, filename: str, thing_label: str, - things: [T], + things: [T]/~, stringifier: fn@(@T) -> str, replacer: fn@(ast::crate, uint, T, test_mode) -> ast::crate, cx: context @@ -333,19 +333,19 @@ fn check_whole_compiler(code: str, suggested_filename_prefix: str, fn removeIfExists(filename: str) { // So sketchy! assert !contains(filename, " "); - run::program_output("bash", ["-c", "rm " + filename]); + run::program_output("bash", ["-c", "rm " + filename]/~); } fn removeDirIfExists(filename: str) { // So sketchy! 
assert !contains(filename, " "); - run::program_output("bash", ["-c", "rm -r " + filename]); + run::program_output("bash", ["-c", "rm -r " + filename]/~); } fn check_running(exe_filename: str) -> happiness { let p = run::program_output( "/Users/jruderman/scripts/timed_run_rust_program.py", - [exe_filename]); + [exe_filename]/~); let comb = p.out + "\n" + p.err; if str::len(comb) > 1u { log(error, "comb comb comb: " + comb); @@ -385,7 +385,7 @@ fn check_compiling(filename: str) -> happiness { let p = run::program_output( "/Users/jruderman/code/rust/build/x86_64-apple-darwin/\ stage1/bin/rustc", - [filename]); + [filename]/~); //#error("Status: %d", p.status); if p.status == 0 { @@ -419,7 +419,7 @@ fn parse_and_print(code: @str) -> str { let sess = parse::new_parse_sess(option::none); write_file(filename, *code); let crate = parse::parse_crate_from_source_str( - filename, code, [], sess); + filename, code, []/~, sess); io::with_str_reader(*code) { |rdr| as_str({|a|pprust::print_crate(sess.cm, sess.span_diagnostic, @@ -451,7 +451,7 @@ fn content_is_dangerous_to_run(code: str) -> bool { "import", // espeically fs, run "native", "unsafe", - "log"]; // python --> rust pipe deadlock? + "log"]/~; // python --> rust pipe deadlock? for dangerous_patterns.each {|p| if contains(code, p) { ret true; } } ret false; @@ -459,7 +459,7 @@ fn content_is_dangerous_to_run(code: str) -> bool { fn content_is_dangerous_to_compile(code: str) -> bool { let dangerous_patterns = - ["xfail-test"]; + ["xfail-test"]/~; for dangerous_patterns.each {|p| if contains(code, p) { ret true; } } ret false; @@ -475,7 +475,7 @@ fn content_might_not_converge(code: str) -> bool { " be ", // don't want to replace its child with a non-call: // "Non-call expression in tail call" "\n\n\n\n\n" // https://github.com/mozilla/rust/issues/850 - ]; + ]/~; for confusing_patterns.each {|p| if contains(code, p) { ret true; } } ret false; @@ -488,7 +488,7 @@ fn file_might_not_converge(filename: str) -> bool { "block-arg-in-ternary.rs", // wrapping "move-3-unique.rs", // 0 becomes (0), but both seem reasonable. wtf? "move-3.rs" // 0 becomes (0), but both seem reasonable. wtf? 
- ]; + ]/~; for confusing_files.each {|f| if contains(filename, f) { ret true; } } @@ -518,12 +518,12 @@ fn check_roundtrip_convergence(code: @str, maxIters: uint) { write_file("round-trip-b.rs", *newv); run::run_program("diff", ["-w", "-u", "round-trip-a.rs", - "round-trip-b.rs"]); + "round-trip-b.rs"]/~); fail "Mismatch"; } } -fn check_convergence(files: [str]) { +fn check_convergence(files: [str]/~) { #error("pp convergence tests: %u files", vec::len(files)); for files.each {|file| if !file_might_not_converge(file) { @@ -538,7 +538,7 @@ fn check_convergence(files: [str]) { } } -fn check_variants(files: [str], cx: context) { +fn check_variants(files: [str]/~, cx: context) { for files.each {|file| if cx.mode == tm_converge && file_might_not_converge(file) { #error("Skipping convergence test based on\ @@ -562,7 +562,7 @@ fn check_variants(files: [str], cx: context) { let crate = parse::parse_crate_from_source_str( file, - s, [], sess); + s, []/~, sess); io::with_str_reader(*s) { |rdr| #error("%s", as_str({|a|pprust::print_crate(sess.cm, @@ -576,12 +576,12 @@ fn check_variants(files: [str], cx: context) { } } -fn main(args: [str]) { +fn main(args: [str]/~) { if vec::len(args) != 2u { #error("usage: %s ", args[0]); ret; } - let mut files = []; + let mut files = []/~; let root = args[1]; find_rust_files(files, root); diff --git a/src/fuzzer/ivec_fuzz.rs b/src/fuzzer/ivec_fuzz.rs index 9ea4d888fb2..0f5e95a7d73 100644 --- a/src/fuzzer/ivec_fuzz.rs +++ b/src/fuzzer/ivec_fuzz.rs @@ -8,8 +8,8 @@ Idea: provide functions for 'exhaustive' and 'random' modification of vecs. It would be nice if this could be data-driven, so the two functions could share information: - type vec_modifier = rec(fn ( v, uint i) -> [T] fun, uint lo, uint di); - const [vec_modifier] vec_modifiers = ~[rec(fun=vec_omit, 0u, 1u), ...]; + type vec_modifier = rec(fn ( v, uint i) -> [T]/~ fun, uint lo, uint di); + const [vec_modifier]/~ vec_modifiers = ~[rec(fun=vec_omit, 0u, 1u), ...]/~; But that gives me "error: internal compiler error unimplemented consts that's not a plain literal". https://github.com/graydon/rust/issues/570 @@ -24,23 +24,23 @@ import vec::slice; import vec::len; import int; -fn vec_omit(v: [T], i: uint) -> [T] { +fn vec_omit(v: [T]/~, i: uint) -> [T]/~ { slice(v, 0u, i) + slice(v, i + 1u, len(v)) } -fn vec_dup(v: [T], i: uint) -> [T] { +fn vec_dup(v: [T]/~, i: uint) -> [T]/~ { slice(v, 0u, i) + [v[i]] + slice(v, i, len(v)) } -fn vec_swadj(v: [T], i: uint) -> [T] { +fn vec_swadj(v: [T]/~, i: uint) -> [T]/~ { slice(v, 0u, i) + [v[i + 1u], v[i]] + slice(v, i + 2u, len(v)) } -fn vec_prefix(v: [T], i: uint) -> [T] { slice(v, 0u, i) } -fn vec_suffix(v: [T], i: uint) -> [T] { slice(v, i, len(v)) } +fn vec_prefix(v: [T]/~, i: uint) -> [T]/~ { slice(v, 0u, i) } +fn vec_suffix(v: [T]/~, i: uint) -> [T]/~ { slice(v, i, len(v)) } -fn vec_poke(v: [T], i: uint, x: T) -> [T] { - slice(v, 0u, i) + [x] + slice(v, i + 1u, len(v)) +fn vec_poke(v: [T]/~, i: uint, x: T) -> [T]/~ { + slice(v, 0u, i) + [x]/~ + slice(v, i + 1u, len(v)) } -fn vec_insert(v: [T], i: uint, x: T) -> [T] { - slice(v, 0u, i) + [x] + slice(v, i, len(v)) +fn vec_insert(v: [T]/~, i: uint, x: T) -> [T]/~ { + slice(v, 0u, i) + [x]/~ + slice(v, i, len(v)) } // Iterates over 0...length, skipping the specified number on each side. @@ -51,23 +51,23 @@ fn ix(skip_low: uint, skip_high: uint, length: uint, it: block(uint)) { // Returns a bunch of modified versions of v, some of which introduce // new elements (borrowed from xs). 
-fn vec_edits(v: [T], xs: [T]) -> [[T]] { - let edits: [[T]] = []; +fn vec_edits(v: [T]/~, xs: [T]/~) -> [[T]/~]/~ { + let edits: [[T]/~]/~ = []/~; let Lv: uint = len(v); if Lv != 1u { // When Lv == 1u, this is redundant with omit. - vec::push(edits, []); + vec::push(edits, []/~); } if Lv >= 3u { // When Lv == 2u, this is redundant with swap. vec::push(edits, vec::reversed(v)); } - ix(0u, 1u, Lv) {|i| edits += [vec_omit(v, i)]; } - ix(0u, 1u, Lv) {|i| edits += [vec_dup(v, i)]; } - ix(0u, 2u, Lv) {|i| edits += [vec_swadj(v, i)]; } - ix(1u, 2u, Lv) {|i| edits += [vec_prefix(v, i)]; } - ix(2u, 1u, Lv) {|i| edits += [vec_suffix(v, i)]; } + ix(0u, 1u, Lv) {|i| edits += [vec_omit(v, i)]/~; } + ix(0u, 1u, Lv) {|i| edits += [vec_dup(v, i)]/~; } + ix(0u, 2u, Lv) {|i| edits += [vec_swadj(v, i)]/~; } + ix(1u, 2u, Lv) {|i| edits += [vec_prefix(v, i)]/~; } + ix(2u, 1u, Lv) {|i| edits += [vec_suffix(v, i)]/~; } ix(0u, 1u, len(xs)) {|j| ix(0u, 1u, Lv) {|i| @@ -83,7 +83,7 @@ fn vec_edits(v: [T], xs: [T]) -> [[T]] { // Would be nice if this were built in: // https://github.com/graydon/rust/issues/424 -fn vec_to_str(v: [int]) -> str { +fn vec_to_str(v: [int]/~) -> str { let i = 0u; let s = "["; while i < len(v) { @@ -94,19 +94,19 @@ fn vec_to_str(v: [int]) -> str { ret s + "]"; } -fn show_edits(a: [int], xs: [int]) { +fn show_edits(a: [int]/~, xs: [int]/~) { log(error, "=== Edits of " + vec_to_str(a) + " ==="); let b = vec_edits(a, xs); ix(0u, 1u, len(b)) {|i| log(error, vec_to_str(b[i])); } } fn demo_edits() { - let xs = [7, 8]; - show_edits([], xs); - show_edits([1], xs); - show_edits([1, 2], xs); - show_edits([1, 2, 3], xs); - show_edits([1, 2, 3, 4], xs); + let xs = [7, 8]/~; + show_edits([]/~, xs); + show_edits([1]/~, xs); + show_edits([1, 2]/~, xs); + show_edits([1, 2, 3]/~, xs); + show_edits([1, 2, 3, 4]/~, xs); } fn main() { demo_edits(); } diff --git a/src/fuzzer/rand_util.rs b/src/fuzzer/rand_util.rs index 3f5c00c313f..1ef3d140c22 100644 --- a/src/fuzzer/rand_util.rs +++ b/src/fuzzer/rand_util.rs @@ -8,7 +8,7 @@ fn under(r : rand::rng, n : uint) -> uint { } // random choice from a vec -fn choice(r : rand::rng, v : [T]) -> T { +fn choice(r : rand::rng, v : [T]/~) -> T { assert vec::len(v) != 0u; v[under(r, vec::len(v))] } @@ -16,7 +16,7 @@ fn choice(r : rand::rng, v : [T]) -> T { fn unlikely(r : rand::rng, n : uint) -> bool { under(r, n) == 0u } // shuffle a vec in place -fn shuffle(r : rand::rng, &v : [mut T]) { +fn shuffle(r : rand::rng, &v : [mut T]/~) { let i = vec::len(v); while i >= 2u { // Loop invariant: elements with index >= i have been locked in place. @@ -26,20 +26,20 @@ fn shuffle(r : rand::rng, &v : [mut T]) { } // create a shuffled copy of a vec -fn shuffled(r : rand::rng, v : [T]) -> [T] { +fn shuffled(r : rand::rng, v : [T]/~) -> [T]/~ { let w = vec::to_mut(v); shuffle(r, w); vec::from_mut(w) // Shouldn't this happen automatically? } // sample from a population without replacement -//fn sample(r : rand::rng, pop : [T], k : uint) -> [T] { fail } +//fn sample(r : rand::rng, pop : [T]/~, k : uint) -> [T]/~ { fail } // Two ways to make a weighted choice. 
// * weighted_choice is O(number of choices) time // * weighted_vec is O(total weight) space type weighted = { weight: uint, item: T }; -fn weighted_choice(r : rand::rng, v : [weighted]) -> T { +fn weighted_choice(r : rand::rng, v : [weighted]/~) -> T { assert vec::len(v) != 0u; let total = 0u; for {weight: weight, item: _} in v { @@ -57,8 +57,8 @@ fn weighted_choice(r : rand::rng, v : [weighted]) -> T { core::unreachable(); } -fn weighted_vec(v : [weighted]) -> [T] { - let r = []; +fn weighted_vec(v : [weighted]/~) -> [T]/~ { + let r = []/~; for {weight: weight, item: item} in v { let i = 0u; while i < weight { @@ -74,10 +74,10 @@ fn main() let r = rand::mk_rng(); log(error, under(r, 5u)); - log(error, choice(r, [10, 20, 30])); + log(error, choice(r, [10, 20, 30]/~)); log(error, if unlikely(r, 5u) { "unlikely" } else { "likely" }); - let a = [mut 1, 2, 3]; + let a = [mut 1, 2, 3]/~; shuffle(r, a); log(error, a); @@ -86,7 +86,7 @@ fn main() {weight:1u, item:"low"}, {weight:8u, item:"middle"}, {weight:1u, item:"high"} - ]; + ]/~; let w = weighted_vec(v); while i < 1000u { diff --git a/src/libcore/arc.rs b/src/libcore/arc.rs index 7e8d69298bd..f13a34d260e 100644 --- a/src/libcore/arc.rs +++ b/src/libcore/arc.rs @@ -170,7 +170,7 @@ mod tests { #[test] fn manually_share_arc() { - let v = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]; + let v = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]/~; let arc_v = arc::arc(v); let p = port(); @@ -182,7 +182,7 @@ mod tests { let arc_v = p.recv(); - let v = *arc::get::<[int]>(&arc_v); + let v = *arc::get::<[int]/~>(&arc_v); assert v[3] == 4; }; @@ -196,7 +196,7 @@ mod tests { #[test] fn auto_share_arc() { - let v = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]; + let v = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]/~; let (_res, arc_c) = shared_arc(v); let p = port(); @@ -216,7 +216,7 @@ mod tests { #[test] #[ignore] // this can probably infinite loop too. fn exclusive_arc() { - let mut futures = []; + let mut futures = []/~; let num_tasks = 10u; let count = 1000u; @@ -231,7 +231,7 @@ mod tests { **count += 1u; } } - })]; + })]/~; }; for futures.each {|f| f.get() }; diff --git a/src/libcore/char.rs b/src/libcore/char.rs index 9aa1614114b..ffead7a523b 100644 --- a/src/libcore/char.rs +++ b/src/libcore/char.rs @@ -65,7 +65,7 @@ pure fn is_uppercase(c: char) -> bool { #[doc = " Indicates whether a character is whitespace, defined in terms of the Unicode General Categories 'Zs', 'Zl', 'Zp' -additional 'Cc'-category control codes in the range [0x09, 0x0d] +additional 'Cc'-category control codes in the range [0x09, 0x0d]/~ "] pure fn is_whitespace(c: char) -> bool { ret ('\x09' <= c && c <= '\x0d') @@ -128,8 +128,8 @@ Return the hexadecimal unicode escape of a char. The rules are as follows: - - chars in [0,0xff] get 2-digit escapes: `\\xNN` - - chars in [0x100,0xffff] get 4-digit escapes: `\\uNNNN` + - chars in [0,0xff]/~ get 2-digit escapes: `\\xNN` + - chars in [0x100,0xffff]/~ get 4-digit escapes: `\\uNNNN` - chars above 0x10000 get 8-digit escapes: `\\UNNNNNNNN` "] fn escape_unicode(c: char) -> str { @@ -154,7 +154,7 @@ languages. The exact rules are: - Tab, CR and LF are escaped as '\t', '\r' and '\n' respectively. - Single-quote, double-quote and backslash chars are backslash-escaped. - - Any other chars in the range [0x20,0x7e] are not escaped. + - Any other chars in the range [0x20,0x7e]/~ are not escaped. - Any other chars are given hex unicode escapes; see `escape_unicode`. 
"] fn escape_default(c: char) -> str { diff --git a/src/libcore/comm.rs b/src/libcore/comm.rs index 8cc82e56b8f..4a01ccea324 100644 --- a/src/libcore/comm.rs +++ b/src/libcore/comm.rs @@ -217,7 +217,7 @@ fn peek_(p: *rust_port) -> bool { #[doc = "Receive on one of two ports"] fn select2(p_a: port, p_b: port) -> either { - let ports = [(**p_a).po, (**p_b).po]; + let ports = [(**p_a).po, (**p_b).po]/~; let n_ports = 2 as libc::size_t; let yield = 0u, yieldp = ptr::addr_of(yield); diff --git a/src/libcore/dvec.rs b/src/libcore/dvec.rs index 85d61d3f606..c94d0dc718b 100644 --- a/src/libcore/dvec.rs +++ b/src/libcore/dvec.rs @@ -32,7 +32,7 @@ may permit read-only access during iteration or other use. # WARNING For maximum performance, this type is implemented using some rather -unsafe code. In particular, this innocent looking `[mut A]` pointer +unsafe code. In particular, this innocent looking `[mut A]/~` pointer *may be null!* Therefore, it is important you not reach into the data structure manually but instead use the provided extensions. @@ -48,27 +48,26 @@ type could only produce 47 million pushes/second. "] type dvec = { - - mut data: [mut A] + mut data: [mut A]/~ }; #[doc = "Creates a new, empty dvec"] fn dvec() -> dvec { - {mut data: [mut]} + {mut data: [mut]/~} } #[doc = "Creates a new dvec with a single element"] fn from_elt(+e: A) -> dvec { - {mut data: [mut e]} + {mut data: [mut e]/~} } #[doc = "Creates a new dvec with the contents of a vector"] -fn from_vec(+v: [mut A]) -> dvec { +fn from_vec(+v: [mut A]/~) -> dvec { {mut data: v} } #[doc = "Consumes the vector and returns its contents"] -fn unwrap(-d: dvec) -> [mut A] { +fn unwrap(-d: dvec) -> [mut A]/~ { let {data: v} <- d; ret v; } @@ -84,7 +83,7 @@ impl private_methods for dvec { } #[inline(always)] - fn borrow(f: fn(-[mut A]) -> B) -> B { + fn borrow(f: fn(-[mut A]/~) -> B) -> B { unsafe { let mut data = unsafe::reinterpret_cast(null::<()>()); data <-> self.data; @@ -95,7 +94,7 @@ impl private_methods for dvec { } #[inline(always)] - fn return(-data: [mut A]) { + fn return(-data: [mut A]/~) { unsafe { self.data <- data; } @@ -114,7 +113,7 @@ impl extensions for dvec { "] #[inline(always)] - fn swap(f: fn(-[mut A]) -> [mut A]) { + fn swap(f: fn(-[mut A]/~) -> [mut A]/~) { self.borrow { |v| self.return(f(v)) } } @@ -128,7 +127,7 @@ impl extensions for dvec { } #[doc = "Overwrite the current contents"] - fn set(+w: [mut A]) { + fn set(+w: [mut A]/~) { self.check_not_borrowed(); self.data <- w; } @@ -151,7 +150,7 @@ impl extensions for dvec { let data_ptr: *() = unsafe::reinterpret_cast(data); if data_ptr.is_null() { fail "Recursive use of dvec"; } log(error, "a"); - self.data <- [mut t] + data; + self.data <- [mut t]/~ + data; log(error, "b"); } } @@ -219,7 +218,7 @@ impl extensions for dvec { } }; - for ts.each { |t| v += [t] }; + for ts.each { |t| v += [t]/~ }; v } } @@ -229,7 +228,7 @@ impl extensions for dvec { See `unwrap()` if you do not wish to copy the contents. 
"] - fn get() -> [A] { + fn get() -> [A]/~ { self.borrow { |v| let w = vec::from_mut(copy v); self.return(v); @@ -271,4 +270,4 @@ impl extensions for dvec { fn last() -> A { self.get_elt(self.len() - 1u) } -} \ No newline at end of file +} diff --git a/src/libcore/either.rs b/src/libcore/either.rs index 00a39416748..564966c1493 100644 --- a/src/libcore/either.rs +++ b/src/libcore/either.rs @@ -21,28 +21,28 @@ fn either(f_left: fn(T) -> V, alt value { left(l) { f_left(l) } right(r) { f_right(r) } } } -fn lefts(eithers: [either]) -> [T] { +fn lefts(eithers: [either]/~) -> [T]/~ { #[doc = "Extracts from a vector of either all the left values"]; - let mut result: [T] = []; + let mut result: [T]/~ = []/~; for vec::each(eithers) {|elt| - alt elt { left(l) { result += [l]; } _ {/* fallthrough */ } } + alt elt { left(l) { result += [l]/~; } _ {/* fallthrough */ } } } ret result; } -fn rights(eithers: [either]) -> [U] { +fn rights(eithers: [either]/~) -> [U]/~ { #[doc = "Extracts from a vector of either all the right values"]; - let mut result: [U] = []; + let mut result: [U]/~ = []/~; for vec::each(eithers) {|elt| - alt elt { right(r) { result += [r]; } _ {/* fallthrough */ } } + alt elt { right(r) { result += [r]/~; } _ {/* fallthrough */ } } } ret result; } -fn partition(eithers: [either]) - -> {lefts: [T], rights: [U]} { +fn partition(eithers: [either]/~) + -> {lefts: [T]/~, rights: [U]/~} { #[doc = " Extracts from a vector of either all the left values and right values @@ -50,10 +50,10 @@ fn partition(eithers: [either]) right values. "]; - let mut lefts: [T] = []; - let mut rights: [U] = []; + let mut lefts: [T]/~ = []/~; + let mut rights: [U]/~ = []/~; for vec::each(eithers) {|elt| - alt elt { left(l) { lefts += [l]; } right(r) { rights += [r]; } } + alt elt { left(l) { lefts += [l]/~; } right(r) { rights += [r]/~; } } } ret {lefts: lefts, rights: rights}; } @@ -112,49 +112,49 @@ fn test_either_right() { #[test] fn test_lefts() { - let input = [left(10), right(11), left(12), right(13), left(14)]; + let input = [left(10), right(11), left(12), right(13), left(14)]/~; let result = lefts(input); - assert (result == [10, 12, 14]); + assert (result == [10, 12, 14]/~); } #[test] fn test_lefts_none() { - let input: [either] = [right(10), right(10)]; + let input: [either]/~ = [right(10), right(10)]/~; let result = lefts(input); assert (vec::len(result) == 0u); } #[test] fn test_lefts_empty() { - let input: [either] = []; + let input: [either]/~ = []/~; let result = lefts(input); assert (vec::len(result) == 0u); } #[test] fn test_rights() { - let input = [left(10), right(11), left(12), right(13), left(14)]; + let input = [left(10), right(11), left(12), right(13), left(14)]/~; let result = rights(input); - assert (result == [11, 13]); + assert (result == [11, 13]/~); } #[test] fn test_rights_none() { - let input: [either] = [left(10), left(10)]; + let input: [either]/~ = [left(10), left(10)]/~; let result = rights(input); assert (vec::len(result) == 0u); } #[test] fn test_rights_empty() { - let input: [either] = []; + let input: [either]/~ = []/~; let result = rights(input); assert (vec::len(result) == 0u); } #[test] fn test_partition() { - let input = [left(10), right(11), left(12), right(13), left(14)]; + let input = [left(10), right(11), left(12), right(13), left(14)]/~; let result = partition(input); assert (result.lefts[0] == 10); assert (result.lefts[1] == 12); @@ -165,7 +165,7 @@ fn test_partition() { #[test] fn test_partition_no_lefts() { - let input: [either] = [right(10), right(11)]; + let 
input: [either]/~ = [right(10), right(11)]/~; let result = partition(input); assert (vec::len(result.lefts) == 0u); assert (vec::len(result.rights) == 2u); @@ -173,7 +173,7 @@ fn test_partition_no_lefts() { #[test] fn test_partition_no_rights() { - let input: [either] = [left(10), left(11)]; + let input: [either]/~ = [left(10), left(11)]/~; let result = partition(input); assert (vec::len(result.lefts) == 2u); assert (vec::len(result.rights) == 0u); @@ -181,7 +181,7 @@ fn test_partition_no_rights() { #[test] fn test_partition_empty() { - let input: [either] = []; + let input: [either]/~ = []/~; let result = partition(input); assert (vec::len(result.lefts) == 0u); assert (vec::len(result.rights) == 0u); diff --git a/src/libcore/extfmt.rs b/src/libcore/extfmt.rs index 175974f09cb..b1bb6d80a7a 100644 --- a/src/libcore/extfmt.rs +++ b/src/libcore/extfmt.rs @@ -9,12 +9,12 @@ The 'fmt' extension is modeled on the posix printf system. A posix conversion ostensibly looks like this -> %[parameter][flags][width][.precision][length]type +> %[parameter]/~[flags]/~[width]/~[.precision]/~[length]/~type Given the different numeric type bestiary we have, we omit the 'length' parameter and support slightly different conversions for 'type' -> %[parameter][flags][width][.precision]type +> %[parameter]/~[flags]/~[width]/~[.precision]/~type we also only support translating-to-rust a tiny subset of the possible combinations at the moment. @@ -71,7 +71,7 @@ mod ct { // A formatted conversion from an expression to a string type conv = {param: option, - flags: [flag], + flags: [flag]/~, width: count, precision: count, ty: ty}; @@ -81,14 +81,14 @@ mod ct { enum piece { piece_string(str), piece_conv(conv), } type error_fn = fn@(str) -> ! ; - fn parse_fmt_string(s: str, error: error_fn) -> [piece] { - let mut pieces: [piece] = []; + fn parse_fmt_string(s: str, error: error_fn) -> [piece]/~ { + let mut pieces: [piece]/~ = []/~; let lim = str::len(s); let mut buf = ""; - fn flush_buf(buf: str, &pieces: [piece]) -> str { + fn flush_buf(buf: str, &pieces: [piece]/~) -> str { if str::len(buf) > 0u { let piece = piece_string(buf); - pieces += [piece]; + pieces += [piece]/~; } ret ""; } @@ -108,7 +108,7 @@ mod ct { } else { buf = flush_buf(buf, pieces); let rs = parse_conversion(s, i, lim, error); - pieces += [rs.piece]; + pieces += [rs.piece]/~; i = rs.next; } } else { buf += curr; i += size; } @@ -162,16 +162,16 @@ mod ct { }; } fn parse_flags(s: str, i: uint, lim: uint) -> - {flags: [flag], next: uint} { - let noflags: [flag] = []; + {flags: [flag]/~, next: uint} { + let noflags: [flag]/~ = []/~; if i >= lim { ret {flags: noflags, next: i}; } fn more_(f: flag, s: str, i: uint, lim: uint) -> - {flags: [flag], next: uint} { + {flags: [flag]/~, next: uint} { let next = parse_flags(s, i + 1u, lim); let rest = next.flags; let j = next.next; - let curr: [flag] = [f]; + let curr: [flag]/~ = [f]/~; ret {flags: curr + rest, next: j}; } let more = {|x|more_(x, s, i, lim)}; @@ -262,7 +262,7 @@ mod ct { // Functions used by the fmt extension at runtime. For now there are a lot of // decisions made a runtime. If it proves worthwhile then some of these // conditions can be evaluated at compile-time. For now though it's cleaner to -// implement it this way, I think. +// implement it 0this way, I think. mod rt { enum flag { flag_left_justify, @@ -276,7 +276,7 @@ mod rt { // FIXME (#1993): May not want to use a vector here for flags; instead // just use a bool per flag. 
- type conv = {flags: [flag], width: count, precision: count, ty: ty}; + type conv = {flags: [flag]/~, width: count, precision: count, ty: ty}; fn conv_int(cv: conv, i: int) -> str { let radix = 10u; @@ -430,12 +430,13 @@ mod rt { } ret padstr + s; } - fn have_flag(flags: [flag], f: flag) -> bool { + fn have_flag(flags: [flag]/~, f: flag) -> bool { for vec::each(flags) {|candidate| if candidate == f { ret true; } } ret false; } } + // Local Variables: // mode: rust; // fill-column: 78; diff --git a/src/libcore/float.rs b/src/libcore/float.rs index cd6e830ab4f..5f6da4c1674 100644 --- a/src/libcore/float.rs +++ b/src/libcore/float.rs @@ -116,10 +116,10 @@ fn to_str_common(num: float, digits: uint, exact: bool) -> str { let mut frac = num - (trunc as float); // stack of digits - let mut fractionalParts = []; + let mut fractionalParts = []/~; // FIXME: (#2608) - // This used to return right away without rounding, as "[-]num", + // This used to return right away without rounding, as "[-]/~num", // but given epsilon like in f64.rs, I don't see how the comparison // to epsilon did much when only used there. // if (frac < epsilon && !exact) || digits == 0u { ret accum; } @@ -236,7 +236,7 @@ Leading and trailing whitespace are ignored. # Return value `none` if the string did not represent a valid number. Otherwise, `some(n)` -where `n` is the floating-point number represented by `[num]`. +where `n` is the floating-point number represented by `[num]/~`. "] fn from_str(num: str) -> option { if num == "inf" { @@ -261,7 +261,7 @@ fn from_str(num: str) -> option { _ { ret none; } } - //Determine if first char is '-'/'+'. Set [pos] and [neg] accordingly. + //Determine if first char is '-'/'+'. Set [pos]/~ and [neg]/~ accordingly. let mut neg = false; //Sign of the result alt str::char_at(num, 0u) { '-' { @@ -345,7 +345,7 @@ fn from_str(num: str) -> option { pos = char_range.next; } let multiplier = pow_with_uint(10u, exponent); - //Note: not [int::pow], otherwise, we'll quickly + //Note: not [int::pow]/~, otherwise, we'll quickly //end up with a nice overflow if neg_exponent { total = total / multiplier; diff --git a/src/libcore/int-template.rs b/src/libcore/int-template.rs index 7f3d385dca0..5721ab750b4 100644 --- a/src/libcore/int-template.rs +++ b/src/libcore/int-template.rs @@ -66,7 +66,7 @@ Parse a buffer of bytes * buf - A byte buffer * radix - The base of the number "] -fn parse_buf(buf: [u8], radix: uint) -> option { +fn parse_buf(buf: [u8]/~, radix: uint) -> option { if vec::len(buf) == 0u { ret none; } let mut i = vec::len(buf) - 1u; let mut start = 0u; diff --git a/src/libcore/int-template/int.rs b/src/libcore/int-template/int.rs index d28333c79e6..2557b1253b7 100644 --- a/src/libcore/int-template/int.rs +++ b/src/libcore/int-template/int.rs @@ -11,7 +11,7 @@ pure fn hash(&&x: int) -> uint { ret x as uint; } #[doc = "Returns `base` raised to the power of `exponent`"] fn pow(base: int, exponent: uint) -> int { - if exponent == 0u { ret 1; } //Not mathemtically true if [base == 0] + if exponent == 0u { ret 1; } //Not mathemtically true if [base == 0]/~ if base == 0 { ret 0; } let mut my_pow = exponent; let mut acc = 1; diff --git a/src/libcore/io.rs b/src/libcore/io.rs index 5fda13bcd70..ff7bdb2b2d6 100644 --- a/src/libcore/io.rs +++ b/src/libcore/io.rs @@ -30,7 +30,7 @@ enum seek_style { seek_set, seek_end, seek_cur, } // The raw underlying reader iface. All readers must implement this. iface reader { // FIXME (#2004): Seekable really should be orthogonal. 
- fn read_bytes(uint) -> [u8]; + fn read_bytes(uint) -> [u8]/~; fn read_byte() -> int; fn unread_byte(int); fn eof() -> bool; @@ -41,9 +41,9 @@ iface reader { // Generic utility functions defined on readers impl reader_util for reader { - fn read_chars(n: uint) -> [char] { + fn read_chars(n: uint) -> [char]/~ { // returns the (consumed offset, n_req), appends characters to &chars - fn chars_from_buf(buf: [u8], &chars: [char]) -> (uint, uint) { + fn chars_from_buf(buf: [u8]/~, &chars: [char]/~) -> (uint, uint) { let mut i = 0u; while i < vec::len(buf) { let b0 = buf[i]; @@ -52,7 +52,7 @@ impl reader_util for reader { i += 1u; assert (w > 0u); if w == 1u { - chars += [ b0 as char ]; + chars += [ b0 as char ]/~; cont; } // can't satisfy this char with the existing data @@ -71,12 +71,12 @@ impl reader_util for reader { // See str::char_at val += ((b0 << ((w + 1u) as u8)) as uint) << (w - 1u) * 6u - w - 1u; - chars += [ val as char ]; + chars += [ val as char ]/~; } ret (i, 0u); } - let mut buf: [u8] = []; - let mut chars: [char] = []; + let mut buf: [u8]/~ = []/~; + let mut chars: [char]/~ = []/~; // might need more bytes, but reading n will never over-read let mut nbread = n; while nbread > 0u { @@ -110,20 +110,20 @@ impl reader_util for reader { } fn read_line() -> str { - let mut buf: [u8] = []; + let mut buf: [u8]/~ = []/~; loop { let ch = self.read_byte(); if ch == -1 || ch == 10 { break; } - buf += [ch as u8]; + buf += [ch as u8]/~; } str::from_bytes(buf) } fn read_c_str() -> str { - let mut buf: [u8] = []; + let mut buf: [u8]/~ = []/~; loop { let ch = self.read_byte(); - if ch < 1 { break; } else { buf += [ch as u8]; } + if ch < 1 { break; } else { buf += [ch as u8]/~; } } str::from_bytes(buf) } @@ -156,8 +156,8 @@ impl reader_util for reader { val } - fn read_whole_stream() -> [u8] { - let mut buf: [u8] = []; + fn read_whole_stream() -> [u8]/~ { + let mut buf: [u8]/~ = []/~; while !self.eof() { buf += self.read_bytes(2048u); } buf } @@ -192,8 +192,8 @@ fn convert_whence(whence: seek_style) -> i32 { } impl of reader for *libc::FILE { - fn read_bytes(len: uint) -> [u8] { - let mut buf : [mut u8] = [mut]; + fn read_bytes(len: uint) -> [u8]/~ { + let mut buf : [mut u8]/~ = [mut]/~; vec::reserve(buf, len); vec::as_mut_buf(buf) {|b| let read = libc::fread(b as *mut c_void, 1u as size_t, @@ -216,7 +216,7 @@ impl of reader for *libc::FILE { // duration of its lifetime. // FIXME there really should be a better way to do this // #2004 impl of reader for {base: T, cleanup: C} { - fn read_bytes(len: uint) -> [u8] { self.base.read_bytes(len) } + fn read_bytes(len: uint) -> [u8]/~ { self.base.read_bytes(len) } fn read_byte() -> int { self.base.read_byte() } fn unread_byte(byte: int) { self.base.unread_byte(byte); } fn eof() -> bool { self.base.eof() } @@ -260,10 +260,10 @@ fn file_reader(path: str) -> result { // Byte buffer readers // TODO: const u8, but this fails with rustboot. 
-type byte_buf = {buf: [u8], mut pos: uint, len: uint}; +type byte_buf = {buf: [u8]/~, mut pos: uint, len: uint}; impl of reader for byte_buf { - fn read_bytes(len: uint) -> [u8] { + fn read_bytes(len: uint) -> [u8]/~ { let rest = self.len - self.pos; let mut to_read = len; if rest < to_read { to_read = rest; } @@ -286,19 +286,19 @@ impl of reader for byte_buf { fn tell() -> uint { self.pos } } -fn bytes_reader(bytes: [u8]) -> reader { +fn bytes_reader(bytes: [u8]/~) -> reader { bytes_reader_between(bytes, 0u, vec::len(bytes)) } -fn bytes_reader_between(bytes: [u8], start: uint, end: uint) -> reader { +fn bytes_reader_between(bytes: [u8]/~, start: uint, end: uint) -> reader { {buf: bytes, mut pos: start, len: end} as reader } -fn with_bytes_reader(bytes: [u8], f: fn(reader) -> t) -> t { +fn with_bytes_reader(bytes: [u8]/~, f: fn(reader) -> t) -> t { f(bytes_reader(bytes)) } -fn with_bytes_reader_between(bytes: [u8], start: uint, end: uint, +fn with_bytes_reader_between(bytes: [u8]/~, start: uint, end: uint, f: fn(reader) -> t) -> t { f(bytes_reader_between(bytes, start, end)) } @@ -402,7 +402,7 @@ fn fd_writer(fd: fd_t, cleanup: bool) -> writer { } -fn mk_file_writer(path: str, flags: [fileflag]) +fn mk_file_writer(path: str, flags: [fileflag]/~) -> result { #[cfg(windows)] @@ -451,9 +451,9 @@ fn u64_to_le_bytes(n: u64, size: uint, f: fn([u8]/&) -> T) -> T { (n >> 56) as u8]/&) } _ { - let mut bytes: [u8] = [], i = size, n = n; + let mut bytes: [u8]/~ = []/~, i = size, n = n; while i > 0u { - bytes += [(n & 255_u64) as u8]; + bytes += [(n & 255_u64) as u8]/~; n >>= 8_u64; i -= 1u; } @@ -481,11 +481,11 @@ fn u64_to_be_bytes(n: u64, size: uint, f: fn([u8]/&) -> T) -> T { (n >> 8) as u8, n as u8]/&) } _ { - let mut bytes: [u8] = []; + let mut bytes: [u8]/~ = []/~; let mut i = size; while i > 0u { let shift = ((i - 1u) * 8u) as u64; - bytes += [(n >> shift) as u8]; + bytes += [(n >> shift) as u8]/~; i -= 1u; } f(bytes) @@ -493,7 +493,7 @@ fn u64_to_be_bytes(n: u64, size: uint, f: fn([u8]/&) -> T) -> T { } } -fn u64_from_be_bytes(data: [u8], start: uint, size: uint) -> u64 { +fn u64_from_be_bytes(data: [u8]/~, start: uint, size: uint) -> u64 { let mut sz = size; assert (sz <= 8u); let mut val = 0_u64; @@ -577,7 +577,7 @@ impl writer_util for writer { fn write_u8(n: u8) { self.write([n]/&) } } -fn file_writer(path: str, flags: [fileflag]) -> result { +fn file_writer(path: str, flags: [fileflag]/~) -> result { result::chain(mk_file_writer(path, flags), { |w| result::ok(w)}) } @@ -638,7 +638,7 @@ fn mem_buffer() -> mem_buffer { @{buf: dvec(), mut pos: 0u} } fn mem_buffer_writer(b: mem_buffer) -> writer { b as writer } -fn mem_buffer_buf(b: mem_buffer) -> [u8] { b.buf.get() } +fn mem_buffer_buf(b: mem_buffer) -> [u8]/~ { b.buf.get() } fn mem_buffer_str(b: mem_buffer) -> str { str::from_bytes(b.buf.get()) } @@ -650,7 +650,7 @@ fn with_str_writer(f: fn(writer)) -> str { io::mem_buffer_str(buf) } -fn with_buf_writer(f: fn(writer)) -> [u8] { +fn with_buf_writer(f: fn(writer)) -> [u8]/~ { let buf = mem_buffer(); let wr = mem_buffer_writer(buf); f(wr); @@ -679,7 +679,7 @@ fn read_whole_file_str(file: str) -> result { // FIXME (#2004): implement this in a low-level way. Going through the // abstractions is pointless. 
-fn read_whole_file(file: str) -> result<[u8], str> { +fn read_whole_file(file: str) -> result<[u8]/~, str> { result::chain(file_reader(file), { |rdr| result::ok(rdr.read_whole_stream()) }) @@ -772,7 +772,7 @@ mod tests { { let out: io::writer = result::get( - io::file_writer(tmpfile, [io::create, io::truncate])); + io::file_writer(tmpfile, [io::create, io::truncate]/~)); out.write_str(frood); } let inp: io::reader = result::get(io::file_reader(tmpfile)); @@ -784,22 +784,22 @@ mod tests { #[test] fn test_readchars_empty() { let inp : io::reader = io::str_reader(""); - let res : [char] = inp.read_chars(128u); + let res : [char]/~ = inp.read_chars(128u); assert(vec::len(res) == 0u); } #[test] fn test_readchars_wide() { let wide_test = "生锈的汤匙切肉汤hello生锈的汤匙切肉汤"; - let ivals : [int] = [ + let ivals : [int]/~ = [ 29983, 38152, 30340, 27748, 21273, 20999, 32905, 27748, 104, 101, 108, 108, 111, 29983, 38152, 30340, 27748, - 21273, 20999, 32905, 27748]; - fn check_read_ln(len : uint, s: str, ivals: [int]) { + 21273, 20999, 32905, 27748]/~; + fn check_read_ln(len : uint, s: str, ivals: [int]/~) { let inp : io::reader = io::str_reader(s); - let res : [char] = inp.read_chars(len); + let res : [char]/~ = inp.read_chars(len); if (len <= vec::len(ivals)) { assert(vec::len(res) == len); } @@ -841,7 +841,7 @@ mod tests { #[test] fn file_writer_bad_name() { - alt io::file_writer("?/?", []) { + alt io::file_writer("?/?", []/~) { result::err(e) { assert str::starts_with(e, "error opening ?/?"); } @@ -862,16 +862,16 @@ mod tests { #[test] fn mem_buffer_overwrite() { let mbuf = mem_buffer(); - mbuf.write([0u8, 1u8, 2u8, 3u8]); - assert mem_buffer_buf(mbuf) == [0u8, 1u8, 2u8, 3u8]; + mbuf.write([0u8, 1u8, 2u8, 3u8]/~); + assert mem_buffer_buf(mbuf) == [0u8, 1u8, 2u8, 3u8]/~; mbuf.seek(-2, seek_cur); - mbuf.write([4u8, 5u8, 6u8, 7u8]); - assert mem_buffer_buf(mbuf) == [0u8, 1u8, 4u8, 5u8, 6u8, 7u8]; + mbuf.write([4u8, 5u8, 6u8, 7u8]/~); + assert mem_buffer_buf(mbuf) == [0u8, 1u8, 4u8, 5u8, 6u8, 7u8]/~; mbuf.seek(-2, seek_end); - mbuf.write([8u8]); + mbuf.write([8u8]/~); mbuf.seek(1, seek_set); - mbuf.write([9u8]); - assert mem_buffer_buf(mbuf) == [0u8, 9u8, 4u8, 5u8, 8u8, 7u8]; + mbuf.write([9u8]/~); + assert mem_buffer_buf(mbuf) == [0u8, 9u8, 4u8, 5u8, 8u8, 7u8]/~; } } diff --git a/src/libcore/iter-trait.rs b/src/libcore/iter-trait.rs index e58aa05f079..59958e505a4 100644 --- a/src/libcore/iter-trait.rs +++ b/src/libcore/iter-trait.rs @@ -19,14 +19,14 @@ impl extensions of iter::base_iter for IMPL_T { } impl extensions for IMPL_T { - fn filter_to_vec(pred: fn(A) -> bool) -> [A] { + fn filter_to_vec(pred: fn(A) -> bool) -> [A]/~ { iter::filter_to_vec(self, pred) } - fn map_to_vec(op: fn(A) -> B) -> [B] { iter::map_to_vec(self, op) } - fn to_vec() -> [A] { iter::to_vec(self) } + fn map_to_vec(op: fn(A) -> B) -> [B]/~ { iter::map_to_vec(self, op) } + fn to_vec() -> [A]/~ { iter::to_vec(self) } // FIXME--bug in resolve prevents this from working (#2611) - // fn flat_map_to_vec>(op: fn(A) -> IB) -> [B] { + // fn flat_map_to_vec>(op: fn(A) -> IB) -> [B]/~ { // iter::flat_map_to_vec(self, op) // } diff --git a/src/libcore/iter.rs b/src/libcore/iter.rs index 070e909c626..d6380a2968a 100644 --- a/src/libcore/iter.rs +++ b/src/libcore/iter.rs @@ -26,8 +26,8 @@ fn any>(self: IA, blk: fn(A) -> bool) -> bool { } fn filter_to_vec>(self: IA, - prd: fn(A) -> bool) -> [A] { - let mut result = []; + prd: fn(A) -> bool) -> [A]/~ { + let mut result = []/~; self.size_hint().iter {|hint| vec::reserve(result, hint); } for self.each 
{|a| if prd(a) { vec::push(result, a); } @@ -35,8 +35,8 @@ fn filter_to_vec>(self: IA, ret result; } -fn map_to_vec>(self: IA, op: fn(A) -> B) -> [B] { - let mut result = []; +fn map_to_vec>(self: IA, op: fn(A) -> B) -> [B]/~ { + let mut result = []/~; self.size_hint().iter {|hint| vec::reserve(result, hint); } for self.each {|a| vec::push(result, op(a)); @@ -45,9 +45,9 @@ fn map_to_vec>(self: IA, op: fn(A) -> B) -> [B] { } fn flat_map_to_vec,IB:base_iter>( - self: IA, op: fn(A) -> IB) -> [B] { + self: IA, op: fn(A) -> IB) -> [B]/~ { - let mut result = []; + let mut result = []/~; for self.each {|a| for op(a).each {|b| vec::push(result, b); @@ -64,8 +64,8 @@ fn foldl>(self: IA, +b0: B, blk: fn(B, A) -> B) -> B { ret b; } -fn to_vec>(self: IA) -> [A] { - foldl::(self, [], {|r, a| r + [a]}) +fn to_vec>(self: IA) -> [A]/~ { + foldl::(self, []/~, {|r, a| r + [a]/~}) } fn contains>(self: IA, x: A) -> bool { @@ -135,17 +135,17 @@ fn test_enumerate() { #[test] fn test_map_and_to_vec() { - let a = bind vec::iter([0, 1, 2], _); + let a = bind vec::iter([0, 1, 2]/~, _); let b = bind map(a, {|i| 2*i}, _); let c = to_vec(b); - assert c == [0, 2, 4]; + assert c == [0, 2, 4]/~; } #[test] fn test_map_directly_on_vec() { - let b = bind map([0, 1, 2], {|i| 2*i}, _); + let b = bind map([0, 1, 2]/~, {|i| 2*i}, _); let c = to_vec(b); - assert c == [0, 2, 4]; + assert c == [0, 2, 4]/~; } #[test] @@ -155,7 +155,7 @@ fn test_filter_on_int_range() { } let l = to_vec(bind filter(bind int::range(0, 10, _), is_even, _)); - assert l == [0, 2, 4, 6, 8]; + assert l == [0, 2, 4, 6, 8]/~; } #[test] @@ -165,7 +165,7 @@ fn test_filter_on_uint_range() { } let l = to_vec(bind filter(bind uint::range(0u, 10u, _), is_even, _)); - assert l == [0u, 2u, 4u, 6u, 8u]; + assert l == [0u, 2u, 4u, 6u, 8u]/~; } #[test] @@ -180,7 +180,7 @@ fn test_filter_map() { let l = to_vec(bind filter_map( bind int::range(0, 5, _), negativate_the_evens, _)); - assert l == [0, -2, -4]; + assert l == [0, -2, -4]/~; } #[test] @@ -190,70 +190,70 @@ fn test_flat_map_with_option() { else { none } } - let a = bind vec::iter([0, 1, 2], _); + let a = bind vec::iter([0, 1, 2]/~, _); let b = bind flat_map(a, if_even, _); let c = to_vec(b); - assert c == [0, 2]; + assert c == [0, 2]/~; } #[test] fn test_flat_map_with_list() { - fn repeat(&&i: int) -> [int] { - let mut r = []; - int::range(0, i) {|_j| r += [i]; } + fn repeat(&&i: int) -> [int]/~ { + let mut r = []/~; + int::range(0, i) {|_j| r += [i]/~; } r } - let a = bind vec::iter([0, 1, 2, 3], _); + let a = bind vec::iter([0, 1, 2, 3]/~, _); let b = bind flat_map(a, repeat, _); let c = to_vec(b); #debug["c = %?", c]; - assert c == [1, 2, 2, 3, 3, 3]; + assert c == [1, 2, 2, 3, 3, 3]/~; } #[test] fn test_repeat() { - let mut c = [], i = 0u; + let mut c = []/~, i = 0u; repeat(5u) {|| - c += [(i * i)]; + c += [(i * i)]/~; i += 1u; }; #debug["c = %?", c]; - assert c == [0u, 1u, 4u, 9u, 16u]; + assert c == [0u, 1u, 4u, 9u, 16u]/~; } #[test] fn test_min() { - assert min([5, 4, 1, 2, 3]) == 1; + assert min([5, 4, 1, 2, 3]/~) == 1; } #[test] #[should_fail] #[ignore(cfg(windows))] fn test_min_empty() { - min::([]); + min::([]/~); } #[test] fn test_max() { - assert max([1, 2, 4, 2, 3]) == 4; + assert max([1, 2, 4, 2, 3]/~) == 4; } #[test] #[should_fail] #[ignore(cfg(windows))] fn test_max_empty() { - max::([]); + max::([]/~); } #[test] fn test_reversed() { - assert to_vec(bind reversed([1, 2, 3], _)) == [3, 2, 1]; + assert to_vec(bind reversed([1, 2, 3]/~, _)) == [3, 2, 1]/~; } #[test] fn test_count() { - assert 
count([1, 2, 1, 2, 1], 1) == 3u; + assert count([1, 2, 1, 2, 1]/~, 1) == 3u; } #[test] @@ -261,7 +261,7 @@ fn test_foldr() { fn sub(&&a: int, &&b: int) -> int { a - b } - let sum = foldr([1, 2, 3, 4], 0, sub); + let sum = foldr([1, 2, 3, 4]/~, 0, sub); assert sum == -2; } */ diff --git a/src/libcore/os.rs b/src/libcore/os.rs index ee26bfc5496..88608676d23 100644 --- a/src/libcore/os.rs +++ b/src/libcore/os.rs @@ -40,23 +40,23 @@ export walk_dir; export as_c_charp, fill_charp_buf; native mod rustrt { - fn rust_env_pairs() -> [str]; + fn rust_env_pairs() -> [str]/~; fn rust_getcwd() -> str; fn rust_path_is_dir(path: *libc::c_char) -> c_int; fn rust_path_exists(path: *libc::c_char) -> c_int; - fn rust_list_files(path: str) -> [str]; + fn rust_list_files(path: str) -> [str]/~; fn rust_process_wait(handle: c_int) -> c_int; fn last_os_error() -> str; fn rust_set_exit_status(code: libc::intptr_t); } -fn env() -> [(str,str)] { - let mut pairs = []; +fn env() -> [(str,str)]/~ { + let mut pairs = []/~; for vec::each(rustrt::rust_env_pairs()) {|p| let vs = str::splitn_char(p, '=', 1u); assert vec::len(vs) == 2u; - pairs += [(vs[0], vs[1])]; + vec::push(pairs, (vs[0], vs[1])); } ret pairs; } @@ -116,7 +116,7 @@ mod win32 { fn as_utf16_p(s: str, f: fn(*u16) -> T) -> T { let mut t = str::to_utf16(s); // Null terminate before passing on. - t += [0u16]; + t += [0u16]/~; vec::as_buf(t, f) } } @@ -373,7 +373,7 @@ fn self_exe_path() -> option { fill_charp_buf() {|buf, sz| let mib = [CTL_KERN as c_int, KERN_PROC as c_int, - KERN_PROC_PATHNAME as c_int, -1 as c_int]; + KERN_PROC_PATHNAME as c_int, -1 as c_int]/~; sysctl(vec::unsafe::to_ptr(mib), vec::len(mib) as c_uint, buf as *mut c_void, ptr::mut_addr_of(sz), ptr::null(), 0u as size_t) == (0 as c_int) @@ -553,7 +553,7 @@ fn make_dir(p: path, mode: c_int) -> bool { } #[doc = "Lists the contents of a directory"] -fn list_dir(p: path) -> [str] { +fn list_dir(p: path) -> [str]/~ { #[cfg(unix)] fn star(p: str) -> str { p } @@ -579,7 +579,7 @@ Lists the contents of a directory This version prepends each entry with the directory. "] -fn list_dir_path(p: path) -> [str] { +fn list_dir_path(p: path) -> [str]/~ { let mut p = p; let pl = str::len(p); if pl == 0u || (p[pl - 1u] as char != path::consts::path_sep @@ -670,7 +670,7 @@ fn copy_file(from: path, to: path) -> bool { fclose(istream); ret false; } - let mut buf : [mut u8] = [mut]; + let mut buf : [mut u8]/~ = [mut]/~; let bufsize = 8192u; vec::reserve(buf, bufsize); let mut done = false; @@ -978,7 +978,7 @@ mod tests { }; assert (ostream as uint != 0u); let s = "hello"; - let mut buf = vec::to_mut(str::bytes(s) + [0 as u8]); + let mut buf = vec::to_mut(str::bytes(s) + [0 as u8]/~); vec::as_mut_buf(buf) {|b| assert (libc::fwrite(b as *c_void, 1u as size_t, (str::len(s) + 1u) as size_t, ostream) @@ -989,7 +989,7 @@ mod tests { fail (#fmt("%s doesn't exist", in)); } assert(rs); - let rslt = run::run_program("diff", [in, out]); + let rslt = run::run_program("diff", [in, out]/~); assert (rslt == 0); assert (remove_file(in)); assert (remove_file(out)); diff --git a/src/libcore/path.rs b/src/libcore/path.rs index 93b32b6b8bd..e75f104bea4 100644 --- a/src/libcore/path.rs +++ b/src/libcore/path.rs @@ -127,7 +127,7 @@ Connects a vector of path segments into a single path. Inserts path separators as needed. "] -fn connect_many(paths: [path]) -> path { +fn connect_many(paths: [path]/~) -> path { ret if vec::len(paths) == 1u { paths[0] } else { @@ -144,7 +144,7 @@ each piece of the path. 
On Windows, if the path is absolute then the first element of the returned vector will be the drive letter followed by a colon. "] -fn split(p: path) -> [path] { +fn split(p: path) -> [path]/~ { str::split_nonempty(p, {|c| c == consts::path_sep || c == consts::alt_path_sep }) @@ -234,7 +234,7 @@ fn normalize(p: path) -> path { ret s; - fn strip_dots(s: [path]) -> [path] { + fn strip_dots(s: [path]/~) -> [path]/~ { vec::filter_map(s, { |elem| if elem == "." { option::none @@ -244,12 +244,12 @@ fn normalize(p: path) -> path { }) } - fn rollup_doubledots(s: [path]) -> [path] { + fn rollup_doubledots(s: [path]/~) -> [path]/~ { if vec::is_empty(s) { - ret []; + ret []/~; } - let mut t = []; + let mut t = []/~; let mut i = vec::len(s); let mut skip = 0; while i != 0u { @@ -258,7 +258,7 @@ fn normalize(p: path) -> path { skip += 1; } else { if skip == 0 { - t += [s[i]]; + vec::push(t, s[i]); } else { skip -= 1; } @@ -266,7 +266,7 @@ fn normalize(p: path) -> path { } let mut t = vec::reversed(t); while skip > 0 { - t += [".."]; + vec::push(t, ".."); skip -= 1; } ret t; @@ -322,28 +322,28 @@ mod tests { #[test] fn split1() { let actual = split("a" + ps() + "b"); - let expected = ["a", "b"]; + let expected = ["a", "b"]/~; assert actual == expected; } #[test] fn split2() { let actual = split("a" + aps() + "b"); - let expected = ["a", "b"]; + let expected = ["a", "b"]/~; assert actual == expected; } #[test] fn split3() { let actual = split(ps() + "a" + ps() + "b"); - let expected = ["a", "b"]; + let expected = ["a", "b"]/~; assert actual == expected; } #[test] fn split4() { let actual = split("a" + ps() + "b" + aps() + "c"); - let expected = ["a", "b", "c"]; + let expected = ["a", "b", "c"]/~; assert actual == expected; } diff --git a/src/libcore/ptr.rs b/src/libcore/ptr.rs index d6288ea2126..bda418d428c 100644 --- a/src/libcore/ptr.rs +++ b/src/libcore/ptr.rs @@ -149,8 +149,8 @@ fn test() { assert (p.fst == 50); assert (p.snd == 60); - let v0 = [32000u16, 32001u16, 32002u16]; - let v1 = [0u16, 0u16, 0u16]; + let v0 = [32000u16, 32001u16, 32002u16]/~; + let v1 = [0u16, 0u16, 0u16]/~; ptr::memcpy(ptr::offset(vec::unsafe::to_ptr(v1), 1u), ptr::offset(vec::unsafe::to_ptr(v0), 1u), 1u); @@ -185,7 +185,7 @@ fn test_buf_len() { str::as_c_str(s0) {|p0| str::as_c_str(s1) {|p1| str::as_c_str(s2) {|p2| - let v = [p0, p1, p2, null()]; + let v = [p0, p1, p2, null()]/~; vec::as_buf(v) {|vp| assert unsafe { buf_len(vp) } == 3u; } diff --git a/src/libcore/rand.rs b/src/libcore/rand.rs index 99ac4addc78..e4cbd475c9a 100644 --- a/src/libcore/rand.rs +++ b/src/libcore/rand.rs @@ -7,9 +7,9 @@ enum rctx {} #[abi = "cdecl"] native mod rustrt { - fn rand_seed() -> [u8]; + fn rand_seed() -> [u8]/~; fn rand_new() -> *rctx; - fn rand_new_seeded(seed: [u8]) -> *rctx; + fn rand_new_seeded(seed: [u8]/~) -> *rctx; fn rand_next(c: *rctx) -> u32; fn rand_free(c: *rctx); } @@ -151,19 +151,19 @@ impl extensions for rng { } #[doc = "Return a random byte string of the specified length"] - fn gen_bytes(len: uint) -> [u8] { + fn gen_bytes(len: uint) -> [u8]/~ { vec::from_fn(len) {|_i| self.gen_u8() } } #[doc = "Choose an item randomly, failing if values is empty"] - fn choose(values: [T]) -> T { + fn choose(values: [T]/~) -> T { self.choose_option(values).get() } #[doc = "Choose some(item) randomly, returning none if values is empty"] - fn choose_option(values: [T]) -> option { + fn choose_option(values: [T]/~) -> option { if values.is_empty() { none } else { @@ -173,13 +173,13 @@ impl extensions for rng { #[doc = "Choose an item 
respecting the relative weights, failing if \ the sum of the weights is 0"] - fn choose_weighted(v : [weighted]) -> T { + fn choose_weighted(v : [weighted]/~) -> T { self.choose_weighted_option(v).get() } #[doc = "Choose some(item) respecting the relative weights, returning \ none if the sum of the weights is 0"] - fn choose_weighted_option(v: [weighted]) -> option { + fn choose_weighted_option(v: [weighted]/~) -> option { let mut total = 0u; for v.each {|item| total += item.weight; @@ -200,25 +200,25 @@ impl extensions for rng { #[doc = "Return a vec containing copies of the items, in order, where \ the weight of the item determines how many copies there are"] - fn weighted_vec(v: [weighted]) -> [T] { - let mut r = []; + fn weighted_vec(v: [weighted]/~) -> [T]/~ { + let mut r = []/~; for v.each {|item| for uint::range(0u, item.weight) {|_i| - r += [item.item]; + r += [item.item]/~; } } r } #[doc = "Shuffle a vec"] - fn shuffle(values: [T]) -> [T] { + fn shuffle(values: [T]/~) -> [T]/~ { let mut m = vec::to_mut(values); self.shuffle_mut(m); ret vec::from_mut(m); } #[doc = "Shuffle a mutable vec in place"] - fn shuffle_mut(&&values: [mut T]) { + fn shuffle_mut(&&values: [mut T]/~) { let mut i = values.len(); while i >= 2u { // invariant: elements with index >= i have been locked in place. @@ -241,7 +241,7 @@ impl of rng for @rand_res { } #[doc = "Create a new random seed for seeded_rng"] -fn seed() -> [u8] { +fn seed() -> [u8]/~ { rustrt::rand_seed() } @@ -254,7 +254,7 @@ fn rng() -> rng { generator constructed with a given seed will generate the same \ sequence of values as all other generators constructed with the \ same seed. The seed may be any length."] -fn seeded_rng(seed: [u8]) -> rng { +fn seeded_rng(seed: [u8]/~) -> rng { @rand_res(rustrt::rand_new_seeded(seed)) as rng } @@ -301,7 +301,7 @@ mod tests { #[test] fn rng_seeded_custom_seed() { // much shorter than generated seeds which are 1024 bytes - let seed = [2u8, 32u8, 4u8, 32u8, 51u8]; + let seed = [2u8, 32u8, 4u8, 32u8, 51u8]/~; let ra = rand::seeded_rng(seed); let rb = rand::seeded_rng(seed); assert ra.gen_str(100u) == rb.gen_str(100u); @@ -309,7 +309,7 @@ mod tests { #[test] fn rng_seeded_custom_seed2() { - let seed = [2u8, 32u8, 4u8, 32u8, 51u8]; + let seed = [2u8, 32u8, 4u8, 32u8, 51u8]/~; let ra = rand::seeded_rng(seed); // Regression test that isaac is actually using the above vector let r = ra.next(); @@ -387,55 +387,56 @@ mod tests { #[test] fn choose() { let r = rand::rng(); - assert r.choose([1, 1, 1]) == 1; + assert r.choose([1, 1, 1]/~) == 1; } #[test] fn choose_option() { let r = rand::rng(); - assert r.choose_option([]) == none::; - assert r.choose_option([1, 1, 1]) == some(1); + assert r.choose_option([]/~) == none::; + assert r.choose_option([1, 1, 1]/~) == some(1); } #[test] fn choose_weighted() { let r = rand::rng(); - assert r.choose_weighted([{weight: 1u, item: 42}]) == 42; + assert r.choose_weighted([{weight: 1u, item: 42}]/~) == 42; assert r.choose_weighted([ {weight: 0u, item: 42}, {weight: 1u, item: 43} - ]) == 43; + ]/~) == 43; } #[test] fn choose_weighted_option() { let r = rand::rng(); - assert r.choose_weighted_option([{weight: 1u, item: 42}]) == some(42); + assert r.choose_weighted_option([{weight: 1u, item: 42}]/~) == + some(42); assert r.choose_weighted_option([ {weight: 0u, item: 42}, {weight: 1u, item: 43} - ]) == some(43); - assert r.choose_weighted_option([]) == none::; + ]/~) == some(43); + assert r.choose_weighted_option([]/~) == none::; } #[test] fn weighted_vec() { let r = rand::rng(); - 
let empty: [int] = []; - assert r.weighted_vec([]) == empty; + let empty: [int]/~ = []/~; + assert r.weighted_vec([]/~) == empty; assert r.weighted_vec([ {weight: 0u, item: 3u}, {weight: 1u, item: 2u}, {weight: 2u, item: 1u} - ]) == [2u, 1u, 1u]; + ]/~) == [2u, 1u, 1u]/~; } #[test] fn shuffle() { let r = rand::rng(); - let empty: [int] = []; - assert r.shuffle([]) == empty; - assert r.shuffle([1, 1, 1]) == [1, 1, 1]; + let empty: [int]/~ = []/~; + assert r.shuffle([]/~) == empty; + assert r.shuffle([1, 1, 1]/~) == [1, 1, 1]/~; } } diff --git a/src/libcore/result.rs b/src/libcore/result.rs index 5c65971f2ea..f9b8388f465 100644 --- a/src/libcore/result.rs +++ b/src/libcore/result.rs @@ -245,18 +245,18 @@ checking for overflow: if x == uint::max_value { ret err(\"overflow\"); } else { ret ok(x+1u); } } - map([1u, 2u, 3u], inc_conditionally).chain {|incd| - assert incd == [2u, 3u, 4u]; + map([1u, 2u, 3u]/~, inc_conditionally).chain {|incd| + assert incd == [2u, 3u, 4u]/~; } "] fn map_vec( - ts: [T], op: fn(T) -> result) -> result<[V],U> { + ts: [T]/~, op: fn(T) -> result) -> result<[V]/~,U> { - let mut vs: [V] = []; + let mut vs: [V]/~ = []/~; vec::reserve(vs, vec::len(ts)); for vec::each(ts) {|t| alt op(t) { - ok(v) { vs += [v]; } + ok(v) { vs += [v]/~; } err(u) { ret err(u); } } } @@ -284,16 +284,17 @@ length. While we do not often use preconditions in the standard library, a precondition is used here because result::t is generally used in 'careful' code contexts where it is both appropriate and easy to accommodate an error like the vectors being of different lengths."] -fn map_vec2(ss: [S], ts: [T], op: fn(S,T) -> result) - : vec::same_length(ss, ts) -> result<[V],U> { +fn map_vec2(ss: [S]/~, ts: [T]/~, + op: fn(S,T) -> result) + : vec::same_length(ss, ts) -> result<[V]/~,U> { let n = vec::len(ts); - let mut vs = []; + let mut vs = []/~; vec::reserve(vs, n); let mut i = 0u; while i < n { alt op(ss[i],ts[i]) { - ok(v) { vs += [v]; } + ok(v) { vs += [v]/~; } err(u) { ret err(u); } } i += 1u; @@ -306,7 +307,7 @@ Applies op to the pairwise elements from `ss` and `ts`, aborting on error. This could be implemented using `map2()` but it is more efficient on its own as no result vector is built. 
"] -fn iter_vec2(ss: [S], ts: [T], +fn iter_vec2(ss: [S]/~, ts: [T]/~, op: fn(S,T) -> result<(),U>) : vec::same_length(ss, ts) -> result<(),U> { diff --git a/src/libcore/run.rs b/src/libcore/run.rs index beb314593ab..6c724db52b5 100644 --- a/src/libcore/run.rs +++ b/src/libcore/run.rs @@ -62,8 +62,8 @@ Run a program, providing stdin, stdout and stderr handles The process id of the spawned process "] -fn spawn_process(prog: str, args: [str], - env: option<[(str,str)]>, +fn spawn_process(prog: str, args: [str]/~, + env: option<[(str,str)]/~>, dir: option, in_fd: c_int, out_fd: c_int, err_fd: c_int) -> pid_t { @@ -77,36 +77,36 @@ fn spawn_process(prog: str, args: [str], } } -fn with_argv(prog: str, args: [str], +fn with_argv(prog: str, args: [str]/~, cb: fn(**libc::c_char) -> T) -> T { - let mut argptrs = str::as_c_str(prog) {|b| [b] }; - let mut tmps = []; + let mut argptrs = str::as_c_str(prog) {|b| [b]/~ }; + let mut tmps = []/~; for vec::each(args) {|arg| let t = @arg; - tmps += [t]; - argptrs += str::as_c_str(*t) {|b| [b] }; + tmps += [t]/~; + argptrs += str::as_c_str(*t) {|b| [b]/~ }; } - argptrs += [ptr::null()]; + argptrs += [ptr::null()]/~; vec::as_buf(argptrs, cb) } #[cfg(unix)] -fn with_envp(env: option<[(str,str)]>, +fn with_envp(env: option<[(str,str)]/~>, cb: fn(*c_void) -> T) -> T { // On posixy systems we can pass a char** for envp, which is // a null-terminated array of "k=v\n" strings. alt env { some(es) if !vec::is_empty(es) { - let mut tmps = []; - let mut ptrs = []; + let mut tmps = []/~; + let mut ptrs = []/~; for vec::each(es) {|e| let (k,v) = e; let t = @(#fmt("%s=%s", k, v)); vec::push(tmps, t); - ptrs += str::as_c_str(*t) {|b| [b]}; + ptrs += str::as_c_str(*t) {|b| [b]/~}; } - ptrs += [ptr::null()]; + ptrs += [ptr::null()]/~; vec::as_buf(ptrs) { |p| unsafe { cb(::unsafe::reinterpret_cast(p)) } } @@ -118,7 +118,7 @@ fn with_envp(env: option<[(str,str)]>, } #[cfg(windows)] -fn with_envp(env: option<[(str,str)]>, +fn with_envp(env: option<[(str,str)]/~>, cb: fn(*c_void) -> T) -> T { // On win32 we pass an "environment block" which is not a char**, but // rather a concatenation of null-terminated k=v\0 sequences, with a final @@ -126,15 +126,15 @@ fn with_envp(env: option<[(str,str)]>, unsafe { alt env { some(es) if !vec::is_empty(es) { - let mut blk : [u8] = []; + let mut blk : [u8]/~ = []/~; for vec::each(es) {|e| let (k,v) = e; let t = #fmt("%s=%s", k, v); - let mut v : [u8] = ::unsafe::reinterpret_cast(t); + let mut v : [u8]/~ = ::unsafe::reinterpret_cast(t); blk += v; ::unsafe::forget(v); } - blk += [0_u8]; + blk += [0_u8]/~; vec::as_buf(blk) {|p| cb(::unsafe::reinterpret_cast(p)) } } _ { @@ -164,7 +164,7 @@ Spawns a process and waits for it to terminate The process id "] -fn run_program(prog: str, args: [str]) -> int { +fn run_program(prog: str, args: [str]/~) -> int { let pid = spawn_process(prog, args, none, none, 0i32, 0i32, 0i32); if pid == -1 as pid_t { fail; } @@ -187,7 +187,7 @@ The class will ensure that file descriptors are closed properly. A class with a field "] -fn start_program(prog: str, args: [str]) -> program { +fn start_program(prog: str, args: [str]/~) -> program { let pipe_input = os::pipe(); let pipe_output = os::pipe(); let pipe_err = os::pipe(); @@ -271,7 +271,7 @@ contents of stdout and stderr. A record, {status: int, out: str, err: str} containing the exit code, the contents of stdout and the contents of stderr. 
"] -fn program_output(prog: str, args: [str]) -> +fn program_output(prog: str, args: [str]/~) -> {status: int, out: str, err: str} { let pipe_in = os::pipe(); @@ -397,9 +397,9 @@ mod tests { // Regression test for memory leaks #[ignore(cfg(windows))] // FIXME (#2626) fn test_leaks() { - run::run_program("echo", []); - run::start_program("echo", []); - run::program_output("echo", []); + run::run_program("echo", []/~); + run::start_program("echo", []/~); + run::program_output("echo", []/~); } #[test] @@ -410,7 +410,7 @@ mod tests { let pid = run::spawn_process( - "cat", [], none, none, + "cat", []/~, none, none, pipe_in.in, pipe_out.out, pipe_err.out); os::close(pipe_in.in); os::close(pipe_out.out); @@ -430,7 +430,7 @@ mod tests { #[test] fn waitpid() { - let pid = run::spawn_process("false", [], + let pid = run::spawn_process("false", []/~, none, none, 0i32, 0i32, 0i32); let status = run::waitpid(pid); diff --git a/src/libcore/str.rs b/src/libcore/str.rs index 7b64f6da0bb..63d833e197f 100644 --- a/src/libcore/str.rs +++ b/src/libcore/str.rs @@ -122,7 +122,7 @@ Convert a vector of bytes to a UTF-8 string Fails if invalid UTF-8 "] -pure fn from_bytes(vv: [u8]) -> str { +pure fn from_bytes(vv: [u8]/~) -> str { assert is_utf8(vv); ret unsafe { unsafe::from_bytes(vv) }; } @@ -136,7 +136,7 @@ Fails if invalid UTF-8 "] pure fn from_byte(b: u8) -> str { assert b < 128u8; - let mut v = [b, 0u8]; + let mut v = [b, 0u8]/~; unsafe { ::unsafe::transmute(v) } } @@ -209,7 +209,7 @@ fn push_char(&s: str, ch: char) { } as_bytes(s) {|bytes| - let mut mut_bytes: [u8] = ::unsafe::reinterpret_cast(bytes); + let mut mut_bytes: [u8]/~ = ::unsafe::reinterpret_cast(bytes); vec::unsafe::set_len(mut_bytes, new_len + 1u); ::unsafe::forget(mut_bytes); } @@ -322,10 +322,10 @@ Converts a string to a vector of bytes The result vector is not null-terminated. 
"] -pure fn bytes(s: str) -> [u8] { +pure fn bytes(s: str) -> [u8]/~ { unsafe { let mut s_copy = s; - let mut v: [u8] = ::unsafe::transmute(s_copy); + let mut v: [u8]/~ = ::unsafe::transmute(s_copy); vec::unsafe::set_len(v, len(s)); ret v; } @@ -342,12 +342,12 @@ pure fn byte_slice(s: str/&, f: fn([u8]/&) -> T) -> T { } #[doc = "Convert a string to a vector of characters"] -pure fn chars(s: str/&) -> [char] { - let mut buf = [], i = 0u; +pure fn chars(s: str/&) -> [char]/~ { + let mut buf = []/~, i = 0u; let len = len(s); while i < len { let {ch, next} = char_range_at(s, i); - buf += [ch]; + buf += [ch]/~; i = next; } ret buf; @@ -378,7 +378,7 @@ pure fn slice(s: str/&, begin: uint, end: uint) -> str { #[doc = " Splits a string into substrings at each occurrence of a given character "] -pure fn split_char(s: str/&, sep: char) -> [str] { +pure fn split_char(s: str/&, sep: char) -> [str]/~ { split_char_inner(s, sep, len(s), true) } @@ -388,27 +388,27 @@ character up to 'count' times The byte must be a valid UTF-8/ASCII byte "] -pure fn splitn_char(s: str/&, sep: char, count: uint) -> [str] { +pure fn splitn_char(s: str/&, sep: char, count: uint) -> [str]/~ { split_char_inner(s, sep, count, true) } #[doc = " Like `split_char`, but omits empty strings from the returned vector "] -pure fn split_char_nonempty(s: str/&, sep: char) -> [str] { +pure fn split_char_nonempty(s: str/&, sep: char) -> [str]/~ { split_char_inner(s, sep, len(s), false) } pure fn split_char_inner(s: str/&, sep: char, count: uint, allow_empty: bool) - -> [str] { + -> [str]/~ { if sep < 128u as char { let b = sep as u8, l = len(s); - let mut result = [], done = 0u; + let mut result = []/~, done = 0u; let mut i = 0u, start = 0u; while i < l && done < count { if s[i] == b { if allow_empty || start < i { - result += [unsafe { unsafe::slice_bytes(s, start, i) }]; + result += [unsafe { unsafe::slice_bytes(s, start, i) }]/~; } start = i + 1u; done += 1u; @@ -416,7 +416,7 @@ pure fn split_char_inner(s: str/&, sep: char, count: uint, allow_empty: bool) i += 1u; } if allow_empty || start < l { - result += [unsafe { unsafe::slice_bytes(s, start, l) }]; + result += [unsafe { unsafe::slice_bytes(s, start, l) }]/~; } result } else { @@ -426,7 +426,7 @@ pure fn split_char_inner(s: str/&, sep: char, count: uint, allow_empty: bool) #[doc = "Splits a string into substrings using a character function"] -pure fn split(s: str/&, sepfn: fn(char) -> bool) -> [str] { +pure fn split(s: str/&, sepfn: fn(char) -> bool) -> [str]/~ { split_inner(s, sepfn, len(s), true) } @@ -434,24 +434,24 @@ pure fn split(s: str/&, sepfn: fn(char) -> bool) -> [str] { Splits a string into substrings using a character function, cutting at most `count` times. 
"] -pure fn splitn(s: str/&, sepfn: fn(char) -> bool, count: uint) -> [str] { +pure fn splitn(s: str/&, sepfn: fn(char) -> bool, count: uint) -> [str]/~ { split_inner(s, sepfn, count, true) } #[doc = "Like `split`, but omits empty strings from the returned vector"] -pure fn split_nonempty(s: str/&, sepfn: fn(char) -> bool) -> [str] { +pure fn split_nonempty(s: str/&, sepfn: fn(char) -> bool) -> [str]/~ { split_inner(s, sepfn, len(s), false) } pure fn split_inner(s: str/&, sepfn: fn(cc: char) -> bool, count: uint, - allow_empty: bool) -> [str] { + allow_empty: bool) -> [str]/~ { let l = len(s); - let mut result = [], i = 0u, start = 0u, done = 0u; + let mut result = []/~, i = 0u, start = 0u, done = 0u; while i < l && done < count { let {ch, next} = char_range_at(s, i); if sepfn(ch) { if allow_empty || start < i { - result += [unsafe { unsafe::slice_bytes(s, start, i) }]; + result += [unsafe { unsafe::slice_bytes(s, start, i) }]/~; } start = next; done += 1u; @@ -459,7 +459,7 @@ pure fn split_inner(s: str/&, sepfn: fn(cc: char) -> bool, count: uint, i = next; } if allow_empty || start < l { - result += [unsafe { unsafe::slice_bytes(s, start, l) }]; + result += [unsafe { unsafe::slice_bytes(s, start, l) }]/~; } result } @@ -510,19 +510,19 @@ Splits a string into a vector of the substrings separated by a given string assert [\"\", \"XXX\", \"YYY\", \"\"] == split_str(\".XXX.YYY.\", \".\") ~~~ "] -pure fn split_str(s: str/&a, sep: str/&b) -> [str] { - let mut result = []; +pure fn split_str(s: str/&a, sep: str/&b) -> [str]/~ { + let mut result = []/~; iter_between_matches(s, sep) {|from, to| - unsafe { result += [unsafe::slice_bytes(s, from, to)]; } + unsafe { result += [unsafe::slice_bytes(s, from, to)]/~; } } result } -pure fn split_str_nonempty(s: str/&a, sep: str/&b) -> [str] { - let mut result = []; +pure fn split_str_nonempty(s: str/&a, sep: str/&b) -> [str]/~ { + let mut result = []/~; iter_between_matches(s, sep) {|from, to| if to > from { - unsafe { result += [unsafe::slice_bytes(s, from, to)]; } + unsafe { result += [unsafe::slice_bytes(s, from, to)]/~; } } } result @@ -531,13 +531,13 @@ pure fn split_str_nonempty(s: str/&a, sep: str/&b) -> [str] { #[doc = " Splits a string into a vector of the substrings separated by LF ('\\n') "] -pure fn lines(s: str/&) -> [str] { split_char(s, '\n') } +pure fn lines(s: str/&) -> [str]/~ { split_char(s, '\n') } #[doc = " Splits a string into a vector of the substrings separated by LF ('\\n') and/or CR LF ('\\r\\n') "] -pure fn lines_any(s: str/&) -> [str] { +pure fn lines_any(s: str/&) -> [str]/~ { vec::map(lines(s), {|s| let l = len(s); let mut cp = s; @@ -551,7 +551,7 @@ pure fn lines_any(s: str/&) -> [str] { #[doc = " Splits a string into a vector of the substrings separated by whitespace "] -pure fn words(s: str/&) -> [str] { +pure fn words(s: str/&) -> [str]/~ { split_nonempty(s, {|c| char::is_whitespace(c)}) } @@ -1264,8 +1264,8 @@ pure fn is_utf16(v: [const u16]/&) -> bool { } #[doc = "Converts to a vector of `u16` encoded as UTF-16"] -pure fn to_utf16(s: str/&) -> [u16] { - let mut u = []; +pure fn to_utf16(s: str/&) -> [u16]/~ { + let mut u = []/~; chars_iter(s) {|cch| // Arithmetic with u32 literals is easier on the eyes than chars. 
let mut ch = cch as u32; @@ -1273,14 +1273,14 @@ pure fn to_utf16(s: str/&) -> [u16] { if (ch & 0xFFFF_u32) == ch { // The BMP falls through (assuming non-surrogate, as it should) assert ch <= 0xD7FF_u32 || ch >= 0xE000_u32; - u += [ch as u16] + u += [ch as u16]/~ } else { // Supplementary planes break into surrogates. assert ch >= 0x1_0000_u32 && ch <= 0x10_FFFF_u32; ch -= 0x1_0000_u32; let w1 = 0xD800_u16 | ((ch >> 10) as u16); let w2 = 0xDC00_u16 | ((ch as u16) & 0x3FF_u16); - u += [w1, w2] + u += [w1, w2]/~ } } ret u; @@ -1568,9 +1568,9 @@ interop. let i = str::as_bytes(\"Hello World\") { |bytes| vec::len(bytes) }; ~~~ "] -pure fn as_bytes(s: str, f: fn([u8]) -> T) -> T { +pure fn as_bytes(s: str, f: fn([u8]/~) -> T) -> T { unsafe { - let v: *[u8] = ::unsafe::reinterpret_cast(ptr::addr_of(s)); + let v: *[u8]/~ = ::unsafe::reinterpret_cast(ptr::addr_of(s)); f(*v) } } @@ -1723,7 +1723,7 @@ mod unsafe { #[doc = "Create a Rust string from a *u8 buffer of the given length"] unsafe fn from_buf_len(buf: *u8, len: uint) -> str { - let mut v: [u8] = []; + let mut v: [u8]/~ = []/~; vec::reserve(v, len + 1u); vec::as_buf(v) {|b| ptr::memcpy(b, buf, len); } vec::unsafe::set_len(v, len); @@ -1750,9 +1750,9 @@ mod unsafe { Does not verify that the vector contains valid UTF-8. "] - unsafe fn from_bytes(v: [const u8]) -> str { + unsafe fn from_bytes(v: [const u8]/~) -> str { unsafe { - let mut vcopy : [u8] = ::unsafe::transmute(copy v); + let mut vcopy = ::unsafe::transmute(copy v); vec::push(vcopy, 0u8); ::unsafe::transmute(vcopy) } @@ -1763,7 +1763,7 @@ mod unsafe { Does not verify that the byte is valid UTF-8. "] - unsafe fn from_byte(u: u8) -> str { unsafe::from_bytes([u]) } + unsafe fn from_byte(u: u8) -> str { unsafe::from_bytes([u]/~) } #[doc = " Takes a bytewise (not UTF-8) slice from a string. @@ -1780,7 +1780,7 @@ mod unsafe { assert (begin <= end); assert (end <= n); - let mut v = []; + let mut v = []/~; vec::reserve(v, end - begin + 1u); unsafe { vec::as_buf(v) { |vbuf| @@ -1788,7 +1788,7 @@ mod unsafe { ptr::memcpy(vbuf, src, end - begin); } vec::unsafe::set_len(v, end - begin); - v += [0u8]; + v += [0u8]/~; ::unsafe::transmute(v) } } @@ -1800,7 +1800,7 @@ mod unsafe { } #[doc = "Appends a vector of bytes to a string. 
(Not UTF-8 safe)."] - unsafe fn push_bytes(&s: str, bytes: [u8]) { + unsafe fn push_bytes(&s: str, bytes: [u8]/~) { for vec::each(bytes) {|byte| rustrt::rust_str_push(s, byte); } } @@ -1839,7 +1839,7 @@ mod unsafe { #[test] fn test_from_buf_len() { unsafe { - let a = [65u8, 65u8, 65u8, 65u8, 65u8, 65u8, 65u8, 0u8]; + let a = [65u8, 65u8, 65u8, 65u8, 65u8, 65u8, 65u8, 0u8]/~; let b = vec::unsafe::to_ptr(a); let c = from_buf_len(b, 3u); assert (c == "AAA"); @@ -1920,18 +1920,18 @@ impl extensions/& for str/& { fn slice(begin: uint, end: uint) -> str { slice(self, begin, end) } #[doc = "Splits a string into substrings using a character function"] #[inline] - fn split(sepfn: fn(char) -> bool) -> [str] { split(self, sepfn) } + fn split(sepfn: fn(char) -> bool) -> [str]/~ { split(self, sepfn) } #[doc = " Splits a string into substrings at each occurrence of a given character "] #[inline] - fn split_char(sep: char) -> [str] { split_char(self, sep) } + fn split_char(sep: char) -> [str]/~ { split_char(self, sep) } #[doc = " Splits a string into a vector of the substrings separated by a given string "] #[inline] - fn split_str(sep: str/&a) -> [str] { split_str(self, sep) } + fn split_str(sep: str/&a) -> [str]/~ { split_str(self, sep) } #[doc = "Returns true if one string starts with another"] #[inline] fn starts_with(needle: str/&a) -> bool { starts_with(self, needle) } @@ -2032,79 +2032,79 @@ mod tests { #[test] fn test_split_char() { - fn t(s: str, c: char, u: [str]) { + fn t(s: str, c: char, u: [str]/~) { log(debug, "split_byte: " + s); let v = split_char(s, c); #debug("split_byte to: %?", v); assert vec::all2(v, u, { |a,b| a == b }); } - t("abc.hello.there", '.', ["abc", "hello", "there"]); - t(".hello.there", '.', ["", "hello", "there"]); - t("...hello.there.", '.', ["", "", "", "hello", "there", ""]); + t("abc.hello.there", '.', ["abc", "hello", "there"]/~); + t(".hello.there", '.', ["", "hello", "there"]/~); + t("...hello.there.", '.', ["", "", "", "hello", "there", ""]/~); - assert ["", "", "", "hello", "there", ""] + assert ["", "", "", "hello", "there", ""]/~ == split_char("...hello.there.", '.'); - assert [""] == split_char("", 'z'); - assert ["",""] == split_char("z", 'z'); - assert ["ok"] == split_char("ok", 'z'); + assert [""]/~ == split_char("", 'z'); + assert ["",""]/~ == split_char("z", 'z'); + assert ["ok"]/~ == split_char("ok", 'z'); } #[test] fn test_split_char_2() { let data = "ประเทศไทย中华Việt Nam"; - assert ["ประเทศไทย中华", "iệt Nam"] + assert ["ประเทศไทย中华", "iệt Nam"]/~ == split_char(data, 'V'); - assert ["ประเ", "ศไ", "ย中华Việt Nam"] + assert ["ประเ", "ศไ", "ย中华Việt Nam"]/~ == split_char(data, 'ท'); } #[test] fn test_splitn_char() { - fn t(s: str, c: char, n: uint, u: [str]) { + fn t(s: str, c: char, n: uint, u: [str]/~) { log(debug, "splitn_byte: " + s); let v = splitn_char(s, c, n); #debug("split_byte to: %?", v); #debug("comparing vs. 
%?", u); assert vec::all2(v, u, { |a,b| a == b }); } - t("abc.hello.there", '.', 0u, ["abc.hello.there"]); - t("abc.hello.there", '.', 1u, ["abc", "hello.there"]); - t("abc.hello.there", '.', 2u, ["abc", "hello", "there"]); - t("abc.hello.there", '.', 3u, ["abc", "hello", "there"]); - t(".hello.there", '.', 0u, [".hello.there"]); - t(".hello.there", '.', 1u, ["", "hello.there"]); - t("...hello.there.", '.', 3u, ["", "", "", "hello.there."]); - t("...hello.there.", '.', 5u, ["", "", "", "hello", "there", ""]); + t("abc.hello.there", '.', 0u, ["abc.hello.there"]/~); + t("abc.hello.there", '.', 1u, ["abc", "hello.there"]/~); + t("abc.hello.there", '.', 2u, ["abc", "hello", "there"]/~); + t("abc.hello.there", '.', 3u, ["abc", "hello", "there"]/~); + t(".hello.there", '.', 0u, [".hello.there"]/~); + t(".hello.there", '.', 1u, ["", "hello.there"]/~); + t("...hello.there.", '.', 3u, ["", "", "", "hello.there."]/~); + t("...hello.there.", '.', 5u, ["", "", "", "hello", "there", ""]/~); - assert [""] == splitn_char("", 'z', 5u); - assert ["",""] == splitn_char("z", 'z', 5u); - assert ["ok"] == splitn_char("ok", 'z', 5u); - assert ["z"] == splitn_char("z", 'z', 0u); - assert ["w.x.y"] == splitn_char("w.x.y", '.', 0u); - assert ["w","x.y"] == splitn_char("w.x.y", '.', 1u); + assert [""]/~ == splitn_char("", 'z', 5u); + assert ["",""]/~ == splitn_char("z", 'z', 5u); + assert ["ok"]/~ == splitn_char("ok", 'z', 5u); + assert ["z"]/~ == splitn_char("z", 'z', 0u); + assert ["w.x.y"]/~ == splitn_char("w.x.y", '.', 0u); + assert ["w","x.y"]/~ == splitn_char("w.x.y", '.', 1u); } #[test] fn test_splitn_char_2 () { let data = "ประเทศไทย中华Việt Nam"; - assert ["ประเทศไทย中", "Việt Nam"] + assert ["ประเทศไทย中", "Việt Nam"]/~ == splitn_char(data, '华', 1u); - assert ["", "", "XXX", "YYYzWWWz"] + assert ["", "", "XXX", "YYYzWWWz"]/~ == splitn_char("zzXXXzYYYzWWWz", 'z', 3u); - assert ["",""] == splitn_char("z", 'z', 5u); - assert [""] == splitn_char("", 'z', 5u); - assert ["ok"] == splitn_char("ok", 'z', 5u); + assert ["",""]/~ == splitn_char("z", 'z', 5u); + assert [""]/~ == splitn_char("", 'z', 5u); + assert ["ok"]/~ == splitn_char("ok", 'z', 5u); } #[test] fn test_splitn_char_3() { let data = "ประเทศไทย中华Việt Nam"; - assert ["ประเทศไทย中华", "iệt Nam"] + assert ["ประเทศไทย中华", "iệt Nam"]/~ == splitn_char(data, 'V', 1u); - assert ["ประเ", "ศไทย中华Việt Nam"] + assert ["ประเ", "ศไทย中华Việt Nam"]/~ == splitn_char(data, 'ท', 1u); } @@ -2125,40 +2125,40 @@ mod tests { t("::hello::there::", "::", 3, ""); let data = "ประเทศไทย中华Việt Nam"; - assert ["ประเทศไทย", "Việt Nam"] + assert ["ประเทศไทย", "Việt Nam"]/~ == split_str (data, "中华"); - assert ["", "XXX", "YYY", ""] + assert ["", "XXX", "YYY", ""]/~ == split_str("zzXXXzzYYYzz", "zz"); - assert ["zz", "zYYYz"] + assert ["zz", "zYYYz"]/~ == split_str("zzXXXzYYYz", "XXX"); - assert ["", "XXX", "YYY", ""] == split_str(".XXX.YYY.", "."); - assert [""] == split_str("", "."); - assert ["",""] == split_str("zz", "zz"); - assert ["ok"] == split_str("ok", "z"); - assert ["","z"] == split_str("zzz", "zz"); - assert ["","","z"] == split_str("zzzzz", "zz"); + assert ["", "XXX", "YYY", ""]/~ == split_str(".XXX.YYY.", "."); + assert [""]/~ == split_str("", "."); + assert ["",""]/~ == split_str("zz", "zz"); + assert ["ok"]/~ == split_str("ok", "z"); + assert ["","z"]/~ == split_str("zzz", "zz"); + assert ["","","z"]/~ == split_str("zzzzz", "zz"); } #[test] fn test_split() { let data = "ประเทศไทย中华Việt Nam"; - assert ["ประเทศไทย中", "Việt Nam"] + assert ["ประเทศไทย中", "Việt Nam"]/~ == split 
(data, {|cc| cc == '华'}); - assert ["", "", "XXX", "YYY", ""] + assert ["", "", "XXX", "YYY", ""]/~ == split("zzXXXzYYYz", char::is_lowercase); - assert ["zz", "", "", "z", "", "", "z"] + assert ["zz", "", "", "z", "", "", "z"]/~ == split("zzXXXzYYYz", char::is_uppercase); - assert ["",""] == split("z", {|cc| cc == 'z'}); - assert [""] == split("", {|cc| cc == 'z'}); - assert ["ok"] == split("ok", {|cc| cc == 'z'}); + assert ["",""]/~ == split("z", {|cc| cc == 'z'}); + assert [""]/~ == split("", {|cc| cc == 'z'}); + assert ["ok"]/~ == split("ok", {|cc| cc == 'z'}); } #[test] @@ -2166,34 +2166,34 @@ mod tests { let lf = "\nMary had a little lamb\nLittle lamb\n"; let crlf = "\r\nMary had a little lamb\r\nLittle lamb\r\n"; - assert ["", "Mary had a little lamb", "Little lamb", ""] + assert ["", "Mary had a little lamb", "Little lamb", ""]/~ == lines(lf); - assert ["", "Mary had a little lamb", "Little lamb", ""] + assert ["", "Mary had a little lamb", "Little lamb", ""]/~ == lines_any(lf); - assert ["\r", "Mary had a little lamb\r", "Little lamb\r", ""] + assert ["\r", "Mary had a little lamb\r", "Little lamb\r", ""]/~ == lines(crlf); - assert ["", "Mary had a little lamb", "Little lamb", ""] + assert ["", "Mary had a little lamb", "Little lamb", ""]/~ == lines_any(crlf); - assert [""] == lines (""); - assert [""] == lines_any(""); - assert ["",""] == lines ("\n"); - assert ["",""] == lines_any("\n"); - assert ["banana"] == lines ("banana"); - assert ["banana"] == lines_any("banana"); + assert [""]/~ == lines (""); + assert [""]/~ == lines_any(""); + assert ["",""]/~ == lines ("\n"); + assert ["",""]/~ == lines_any("\n"); + assert ["banana"]/~ == lines ("banana"); + assert ["banana"]/~ == lines_any("banana"); } #[test] fn test_words () { let data = "\nMary had a little lamb\nLittle lamb\n"; - assert ["Mary","had","a","little","lamb","Little","lamb"] + assert ["Mary","had","a","little","lamb","Little","lamb"]/~ == words(data); - assert ["ok"] == words("ok"); - assert [] == words(""); + assert ["ok"]/~ == words("ok"); + assert []/~ == words(""); } #[test] @@ -2250,22 +2250,23 @@ mod tests { #[test] fn test_concat() { - fn t(v: [str], s: str) { assert (eq(concat(v), s)); } - t(["you", "know", "I'm", "no", "good"], "youknowI'mnogood"); - let v: [str] = []; + fn t(v: [str]/~, s: str) { assert (eq(concat(v), s)); } + t(["you", "know", "I'm", "no", "good"]/~, "youknowI'mnogood"); + let v: [str]/~ = []/~; t(v, ""); - t(["hi"], "hi"); + t(["hi"]/~, "hi"); } #[test] fn test_connect() { - fn t(v: [str], sep: str, s: str) { + fn t(v: [str]/~, sep: str, s: str) { assert (eq(connect(v, sep), s)); } - t(["you", "know", "I'm", "no", "good"], " ", "you know I'm no good"); - let v: [str] = []; + t(["you", "know", "I'm", "no", "good"]/~, + " ", "you know I'm no good"); + let v: [str]/~ = []/~; t(v, " ", ""); - t(["hi"], " ", "hi"); + t(["hi"]/~, " ", "hi"); } #[test] @@ -2517,7 +2518,7 @@ mod tests { #[test] fn test_unsafe_from_bytes() { - let a = [65u8, 65u8, 65u8, 65u8, 65u8, 65u8, 65u8]; + let a = [65u8, 65u8, 65u8, 65u8, 65u8, 65u8, 65u8]/~; let b = unsafe { unsafe::from_bytes(a) }; assert (b == "AAAAAAA"); } @@ -2534,7 +2535,7 @@ mod tests { 0x56_u8, 0x69_u8, 0xe1_u8, 0xbb_u8, 0x87_u8, 0x74_u8, 0x20_u8, 0x4e_u8, 0x61_u8, - 0x6d_u8]; + 0x6d_u8]/~; assert ss == from_bytes(bb); } @@ -2552,7 +2553,7 @@ mod tests { 0x56_u8, 0x69_u8, 0xe1_u8, 0xbb_u8, 0x87_u8, 0x74_u8, 0x20_u8, 0x4e_u8, 0x61_u8, - 0x6d_u8]; + 0x6d_u8]/~; let _x = from_bytes(bb); } @@ -2560,7 +2561,7 @@ mod tests { #[test] fn test_from_buf() { 
unsafe { - let a = [65u8, 65u8, 65u8, 65u8, 65u8, 65u8, 65u8, 0u8]; + let a = [65u8, 65u8, 65u8, 65u8, 65u8, 65u8, 65u8, 0u8]/~; let b = vec::unsafe::to_ptr(a); let c = unsafe::from_buf(b); assert (c == "AAAAAAA"); @@ -2609,7 +2610,7 @@ mod tests { fn vec_str_conversions() { let s1: str = "All mimsy were the borogoves"; - let v: [u8] = bytes(s1); + let v: [u8]/~ = bytes(s1); let s2: str = from_bytes(v); let mut i: uint = 0u; let n1: uint = len(s1); @@ -2774,7 +2775,7 @@ mod tests { #[test] fn test_chars() { let ss = "ศไทย中华Việt Nam"; - assert ['ศ','ไ','ท','ย','中','华','V','i','ệ','t',' ','N','a','m'] + assert ['ศ','ไ','ท','ย','中','华','V','i','ệ','t',' ','N','a','m']/~ == chars(ss); } @@ -2785,7 +2786,7 @@ mod tests { [0xd800_u16, 0xdf45_u16, 0xd800_u16, 0xdf3f_u16, 0xd800_u16, 0xdf3b_u16, 0xd800_u16, 0xdf46_u16, 0xd800_u16, 0xdf39_u16, 0xd800_u16, 0xdf3b_u16, - 0xd800_u16, 0xdf30_u16, 0x000a_u16]), + 0xd800_u16, 0xdf30_u16, 0x000a_u16]/~), ("𐐒𐑉𐐮𐑀𐐲𐑋 𐐏𐐲𐑍\n", [0xd801_u16, 0xdc12_u16, 0xd801_u16, @@ -2793,7 +2794,7 @@ mod tests { 0xdc40_u16, 0xd801_u16, 0xdc32_u16, 0xd801_u16, 0xdc4b_u16, 0x0020_u16, 0xd801_u16, 0xdc0f_u16, 0xd801_u16, 0xdc32_u16, 0xd801_u16, 0xdc4d_u16, - 0x000a_u16]), + 0x000a_u16]/~), ("𐌀𐌖𐌋𐌄𐌑𐌉·𐌌𐌄𐌕𐌄𐌋𐌉𐌑\n", [0xd800_u16, 0xdf00_u16, 0xd800_u16, 0xdf16_u16, @@ -2802,7 +2803,7 @@ mod tests { 0x00b7_u16, 0xd800_u16, 0xdf0c_u16, 0xd800_u16, 0xdf04_u16, 0xd800_u16, 0xdf15_u16, 0xd800_u16, 0xdf04_u16, 0xd800_u16, 0xdf0b_u16, 0xd800_u16, - 0xdf09_u16, 0xd800_u16, 0xdf11_u16, 0x000a_u16 ]), + 0xdf09_u16, 0xd800_u16, 0xdf11_u16, 0x000a_u16 ]/~), ("𐒋𐒘𐒈𐒑𐒛𐒒 𐒕𐒓 𐒈𐒚𐒍 𐒏𐒜𐒒𐒖𐒆 𐒕𐒆\n", [0xd801_u16, 0xdc8b_u16, 0xd801_u16, 0xdc98_u16, @@ -2815,7 +2816,7 @@ mod tests { 0xdc9c_u16, 0xd801_u16, 0xdc92_u16, 0xd801_u16, 0xdc96_u16, 0xd801_u16, 0xdc86_u16, 0x0020_u16, 0xd801_u16, 0xdc95_u16, 0xd801_u16, 0xdc86_u16, - 0x000a_u16 ]) ]; + 0x000a_u16 ]/~) ]/~; for vec::each(pairs) {|p| let (s, u) = p; diff --git a/src/libcore/to_str.rs b/src/libcore/to_str.rs index 3af4dec9b6d..229afc810e5 100644 --- a/src/libcore/to_str.rs +++ b/src/libcore/to_str.rs @@ -56,7 +56,7 @@ impl of to_str for (A, B, C){ } } -impl of to_str for [A] { +impl of to_str for [A]/~ { fn to_str() -> str { let mut acc = "[", first = true; for vec::each(self) {|elt| @@ -98,11 +98,12 @@ mod tests { } fn test_vectors() { - let x: [int] = []; - assert x.to_str() == "[]"; - assert [1].to_str() == "[1]"; - assert [1, 2, 3].to_str() == "[1, 2, 3]"; - assert [[], [1], [1, 1]].to_str() == "[[], [1], [1, 1]]"; + let x: [int]/~ = []/~; + assert x.to_str() == "[]/~"; + assert [1]/~.to_str() == "[1]/~"; + assert [1, 2, 3]/~.to_str() == "[1, 2, 3]/~"; + assert [[]/~, [1]/~, [1, 1]/~]/~.to_str() == + "[[]/~, [1]/~, [1, 1]/~]/~"; } fn test_pointer_types() { diff --git a/src/libcore/uint-template.rs b/src/libcore/uint-template.rs index d341921eb03..6ec8d2e8789 100644 --- a/src/libcore/uint-template.rs +++ b/src/libcore/uint-template.rs @@ -88,7 +88,7 @@ Parse a buffer of bytes `buf` must not be empty "] -fn parse_buf(buf: [u8], radix: uint) -> option { +fn parse_buf(buf: [u8]/~, radix: uint) -> option { if vec::len(buf) == 0u { ret none; } let mut i = vec::len(buf) - 1u; let mut power = 1u as T; diff --git a/src/libcore/unsafe.rs b/src/libcore/unsafe.rs index 85e2f8d8934..3ebcde80281 100644 --- a/src/libcore/unsafe.rs +++ b/src/libcore/unsafe.rs @@ -33,7 +33,7 @@ Both types must have the same size and alignment. 
# Example - assert transmute(\"L\") == [76u8, 0u8]; + assert transmute(\"L\") == [76u8, 0u8]/~; "] unsafe fn transmute(-thing: L) -> G { let newthing = reinterpret_cast(thing); @@ -62,7 +62,7 @@ mod tests { #[test] fn test_transmute2() { unsafe { - assert transmute("L") == [76u8, 0u8]; + assert transmute("L") == [76u8, 0u8]/~; } } } diff --git a/src/libcore/vec.rs b/src/libcore/vec.rs index 3f34ee5aabb..0f9cedd84c6 100644 --- a/src/libcore/vec.rs +++ b/src/libcore/vec.rs @@ -126,7 +126,7 @@ capacity, then no action is taken. * v - A vector * n - The number of elements to reserve space for "] -fn reserve(&v: [const T], n: uint) { +fn reserve(&v: [const T]/~, n: uint) { // Only make the (slow) call into the runtime if we have to if capacity(v) < n { let ptr = ptr::addr_of(v) as **unsafe::vec_repr; @@ -150,7 +150,7 @@ capacity, then no action is taken. * v - A vector * n - The number of elements to reserve space for "] -fn reserve_at_least(&v: [const T], n: uint) { +fn reserve_at_least(&v: [const T]/~, n: uint) { reserve(v, uint::next_power_of_two(n)); } @@ -158,7 +158,7 @@ fn reserve_at_least(&v: [const T], n: uint) { Returns the number of elements the vector can hold without reallocating "] #[inline(always)] -pure fn capacity(&&v: [const T]) -> uint { +pure fn capacity(&&v: [const T]/~) -> uint { unsafe { let repr: **unsafe::vec_repr = ::unsafe::reinterpret_cast(addr_of(v)); (**repr).alloc / sys::size_of::() @@ -177,8 +177,8 @@ Creates and initializes an immutable vector. Creates an immutable vector of size `n_elts` and initializes the elements to the value returned by the function `op`. "] -pure fn from_fn(n_elts: uint, op: init_op) -> [T] { - let mut v = []; +pure fn from_fn(n_elts: uint, op: init_op) -> [T]/~ { + let mut v = []/~; unchecked{reserve(v, n_elts);} let mut i: uint = 0u; while i < n_elts unsafe { push(v, op(i)); i += 1u; } @@ -191,8 +191,8 @@ Creates and initializes an immutable vector. Creates an immutable vector of size `n_elts` and initializes the elements to the value `t`. 
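
For example (an illustrative sketch of the behaviour described above):

    assert from_elem(3u, 7) == [7, 7, 7]/~;
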
"] -pure fn from_elem(n_elts: uint, t: T) -> [T] { - let mut v = []; +pure fn from_elem(n_elts: uint, t: T) -> [T]/~ { + let mut v = []/~; unchecked{reserve(v, n_elts)} let mut i: uint = 0u; unsafe { // because push is impure @@ -202,12 +202,12 @@ pure fn from_elem(n_elts: uint, t: T) -> [T] { } #[doc = "Produces a mut vector from an immutable vector."] -fn to_mut(+v: [T]) -> [mut T] { +fn to_mut(+v: [T]/~) -> [mut T]/~ { unsafe { ::unsafe::transmute(v) } } #[doc = "Produces an immutable vector from a mut vector."] -fn from_mut(+v: [mut T]) -> [T] { +fn from_mut(+v: [mut T]/~) -> [T]/~ { unsafe { ::unsafe::transmute(v) } } @@ -217,18 +217,18 @@ fn from_mut(+v: [mut T]) -> [T] { pure fn head(v: [const T]/&) -> T { v[0] } #[doc = "Returns a vector containing all but the first element of a slice"] -pure fn tail(v: [const T]/&) -> [T] { +pure fn tail(v: [const T]/&) -> [T]/~ { ret slice(v, 1u, len(v)); } #[doc = "Returns a vector containing all but the first `n` \ elements of a slice"] -pure fn tailn(v: [const T]/&, n: uint) -> [T] { +pure fn tailn(v: [const T]/&, n: uint) -> [T]/~ { slice(v, n, len(v)) } #[doc = "Returns a vector containing all but the last element of a slice"] -pure fn init(v: [const T]/&) -> [T] { +pure fn init(v: [const T]/&) -> [T]/~ { assert len(v) != 0u; slice(v, 0u, len(v) - 1u) } @@ -251,10 +251,10 @@ pure fn last_opt(v: [const T]/&) -> option { } #[doc = "Returns a copy of the elements from [`start`..`end`) from `v`."] -pure fn slice(v: [const T]/&, start: uint, end: uint) -> [T] { +pure fn slice(v: [const T]/&, start: uint, end: uint) -> [T]/~ { assert (start <= end); assert (end <= len(v)); - let mut result = []; + let mut result = []/~; unchecked { push_all(result, view(v, start, end)); } @@ -276,12 +276,12 @@ pure fn view(v: [const T]/&, start: uint, end: uint) -> [T]/&a { #[doc = " Split the vector `v` by applying each element against the predicate `f`. "] -fn split(v: [T]/&, f: fn(T) -> bool) -> [[T]] { +fn split(v: [T]/&, f: fn(T) -> bool) -> [[T]/~]/~ { let ln = len(v); - if (ln == 0u) { ret [] } + if (ln == 0u) { ret []/~ } let mut start = 0u; - let mut result = []; + let mut result = []/~; while start < ln { alt position_between(v, start, ln, f) { none { break } @@ -299,13 +299,13 @@ fn split(v: [T]/&, f: fn(T) -> bool) -> [[T]] { Split the vector `v` by applying each element against the predicate `f` up to `n` times. "] -fn splitn(v: [T]/&, n: uint, f: fn(T) -> bool) -> [[T]] { +fn splitn(v: [T]/&, n: uint, f: fn(T) -> bool) -> [[T]/~]/~ { let ln = len(v); - if (ln == 0u) { ret [] } + if (ln == 0u) { ret []/~ } let mut start = 0u; let mut count = n; - let mut result = []; + let mut result = []/~; while start < ln && count > 0u { alt position_between(v, start, ln, f) { none { break } @@ -325,12 +325,12 @@ fn splitn(v: [T]/&, n: uint, f: fn(T) -> bool) -> [[T]] { Reverse split the vector `v` by applying each element against the predicate `f`. "] -fn rsplit(v: [T]/&, f: fn(T) -> bool) -> [[T]] { +fn rsplit(v: [T]/&, f: fn(T) -> bool) -> [[T]/~]/~ { let ln = len(v); - if (ln == 0u) { ret [] } + if (ln == 0u) { ret []/~ } let mut end = ln; - let mut result = []; + let mut result = []/~; while end > 0u { alt rposition_between(v, 0u, end, f) { none { break } @@ -348,13 +348,13 @@ fn rsplit(v: [T]/&, f: fn(T) -> bool) -> [[T]] { Reverse split the vector `v` by applying each element against the predicate `f` up to `n times. 
"] -fn rsplitn(v: [T]/&, n: uint, f: fn(T) -> bool) -> [[T]] { +fn rsplitn(v: [T]/&, n: uint, f: fn(T) -> bool) -> [[T]/~]/~ { let ln = len(v); - if (ln == 0u) { ret [] } + if (ln == 0u) { ret []/~ } let mut end = ln; let mut count = n; - let mut result = []; + let mut result = []/~; while end > 0u && count > 0u { alt rposition_between(v, 0u, end, f) { none { break } @@ -373,11 +373,11 @@ fn rsplitn(v: [T]/&, n: uint, f: fn(T) -> bool) -> [[T]] { // Mutators #[doc = "Removes the first element from a vector and return it"] -fn shift(&v: [T]) -> T { +fn shift(&v: [T]/~) -> T { let ln = len::(v); assert (ln > 0u); - let mut vv = []; + let mut vv = []/~; v <-> vv; unsafe { @@ -399,8 +399,8 @@ fn shift(&v: [T]) -> T { } #[doc = "Prepend an element to the vector"] -fn unshift(&v: [T], +x: T) { - let mut vv = [x]; +fn unshift(&v: [T]/~, +x: T) { + let mut vv = [x]/~; v <-> vv; while len(vv) > 0 { push(v, shift(vv)); @@ -408,7 +408,7 @@ fn unshift(&v: [T], +x: T) { } #[doc = "Remove the last element from a vector and return it"] -fn pop(&v: [const T]) -> T { +fn pop(&v: [const T]/~) -> T { let ln = len(v); assert ln > 0u; let valptr = ptr::mut_addr_of(v[ln - 1u]); @@ -421,7 +421,7 @@ fn pop(&v: [const T]) -> T { #[doc = "Append an element to a vector"] #[inline(always)] -fn push(&v: [const T], +initval: T) { +fn push(&v: [const T]/~, +initval: T) { unsafe { let repr: **unsafe::vec_repr = ::unsafe::reinterpret_cast(addr_of(v)); let fill = (**repr).fill; @@ -438,7 +438,7 @@ fn push(&v: [const T], +initval: T) { } } -fn push_slow(&v: [const T], +initval: T) { +fn push_slow(&v: [const T]/~, +initval: T) { unsafe { let ln = v.len(); reserve_at_least(v, ln + 1u); @@ -453,8 +453,9 @@ fn push_slow(&v: [const T], +initval: T) { } #[inline(always)] -fn push_all(&v: [const T], rhs: [const T]/&) { +fn push_all(&v: [const T]/~, rhs: [const T]/&) { reserve(v, v.len() + rhs.len()); + for uint::range(0u, rhs.len()) {|i| push(v, rhs[i]); } @@ -462,8 +463,8 @@ fn push_all(&v: [const T], rhs: [const T]/&) { // Appending #[inline(always)] -pure fn append(lhs: [T]/&, rhs: [const T]/&) -> [T] { - let mut v = []; +pure fn append(lhs: [T]/&, rhs: [const T]/&) -> [T]/~ { + let mut v = []/~; let mut i = 0u; while i < lhs.len() { unsafe { // This is impure, but it appears pure to the caller. @@ -482,8 +483,8 @@ pure fn append(lhs: [T]/&, rhs: [const T]/&) -> [T] { } #[inline(always)] -pure fn append_mut(lhs: [mut T]/&, rhs: [const T]/&) -> [mut T] { - let mut v = [mut]; +pure fn append_mut(lhs: [mut T]/&, rhs: [const T]/&) -> [mut T]/~ { + let mut v = [mut]/~; let mut i = 0u; while i < lhs.len() { unsafe { // This is impure, but it appears pure to the caller. @@ -510,7 +511,7 @@ Expands a vector in place, initializing the new elements to a given value * n - The number of elements to add * initval - The value for the new elements "] -fn grow(&v: [const T], n: uint, initval: T) { +fn grow(&v: [const T]/~, n: uint, initval: T) { reserve_at_least(v, len(v) + n); let mut i: uint = 0u; @@ -530,7 +531,7 @@ Function `init_op` is called `n` times with the values [0..`n`) * init_op - A function to call to retreive each appended element's value "] -fn grow_fn(&v: [const T], n: uint, op: init_op) { +fn grow_fn(&v: [const T]/~, n: uint, op: init_op) { reserve_at_least(v, len(v) + n); let mut i: uint = 0u; while i < n { push(v, op(i)); i += 1u; } @@ -545,7 +546,7 @@ of the vector, expands the vector by replicating `initval` to fill the intervening space. 
"] #[inline(always)] -fn grow_set(&v: [mut T], index: uint, initval: T, val: T) { +fn grow_set(&v: [mut T]/~, index: uint, initval: T, val: T) { if index >= len(v) { grow(v, index - len(v) + 1u, initval); } v[index] = val; } @@ -556,8 +557,8 @@ fn grow_set(&v: [mut T], index: uint, initval: T, val: T) { #[doc = " Apply a function to each element of a vector and return the results "] -pure fn map(v: [T]/&, f: fn(T) -> U) -> [U] { - let mut result = []; +pure fn map(v: [T]/&, f: fn(T) -> U) -> [U]/~ { + let mut result = []/~; unchecked{reserve(result, len(v));} for each(v) {|elem| unsafe { push(result, f(elem)); } } ret result; @@ -566,8 +567,8 @@ pure fn map(v: [T]/&, f: fn(T) -> U) -> [U] { #[doc = " Apply a function to each element of a vector and return the results "] -pure fn mapi(v: [T]/&, f: fn(uint, T) -> U) -> [U] { - let mut result = []; +pure fn mapi(v: [T]/&, f: fn(uint, T) -> U) -> [U]/~ { + let mut result = []/~; unchecked{reserve(result, len(v));} for eachi(v) {|i, elem| unsafe { push(result, f(i, elem)); } } ret result; @@ -577,8 +578,8 @@ pure fn mapi(v: [T]/&, f: fn(uint, T) -> U) -> [U] { Apply a function to each element of a vector and return a concatenation of each result vector "] -pure fn flat_map(v: [T]/&, f: fn(T) -> [U]) -> [U] { - let mut result = []; +pure fn flat_map(v: [T]/&, f: fn(T) -> [U]/~) -> [U]/~ { + let mut result = []/~; for each(v) {|elem| result += f(elem); } ret result; } @@ -587,10 +588,10 @@ pure fn flat_map(v: [T]/&, f: fn(T) -> [U]) -> [U] { Apply a function to each pair of elements and return the results "] pure fn map2(v0: [T]/&, v1: [U]/&, - f: fn(T, U) -> V) -> [V] { + f: fn(T, U) -> V) -> [V]/~ { let v0_len = len(v0); if v0_len != len(v1) { fail; } - let mut u: [V] = []; + let mut u: [V]/~ = []/~; let mut i = 0u; while i < v0_len { unsafe { push(u, f(copy v0[i], copy v1[i])) }; @@ -606,8 +607,8 @@ If function `f` returns `none` then that element is excluded from the resulting vector. "] pure fn filter_map(v: [T]/&, f: fn(T) -> option) - -> [U] { - let mut result = []; + -> [U]/~ { + let mut result = []/~; for each(v) {|elem| alt f(elem) { none {/* no-op */ } @@ -624,8 +625,8 @@ holds. Apply function `f` to each element of `v` and return a vector containing only those elements for which `f` returned true. "] -pure fn filter(v: [T]/&, f: fn(T) -> bool) -> [T] { - let mut result = []; +pure fn filter(v: [T]/&, f: fn(T) -> bool) -> [T]/~ { + let mut result = []/~; for each(v) {|elem| if f(elem) { unsafe { push(result, elem); } } } @@ -637,8 +638,8 @@ Concatenate a vector of vectors. Flattens a vector of vectors of T into a single vector of T. "] -pure fn concat(v: [[T]]/&) -> [T] { - let mut r = []; +pure fn concat(v: [[T]/~]/&) -> [T]/~ { + let mut r = []/~; for each(v) {|inner| unsafe { push_all(r, inner); } } ret r; } @@ -646,8 +647,8 @@ pure fn concat(v: [[T]]/&) -> [T] { #[doc = " Concatenate a vector of vectors, placing a given separator between each "] -pure fn connect(v: [[T]]/&, sep: T) -> [T] { - let mut r: [T] = []; +pure fn connect(v: [[T]/~]/&, sep: T) -> [T]/~ { + let mut r: [T]/~ = []/~; let mut first = true; for each(v) {|inner| if first { first = false; } else { unsafe { push(r, sep); } } @@ -873,9 +874,9 @@ vector contains the first element of the i-th tuple of the input vector, and the i-th element of the second vector contains the second element of the i-th tuple of the input vector. 
"] -pure fn unzip(v: [(T, U)]/&) -> ([T], [U]) { - let mut as = [], bs = []; - for each(v) {|p| let (a, b) = p; as += [a]; bs += [b]; } +pure fn unzip(v: [(T, U)]/&) -> ([T]/~, [U]/~) { + let mut as = []/~, bs = []/~; + for each(v) {|p| let (a, b) = p; as += [a]/~; bs += [b]/~; } ret (as, bs); } @@ -885,12 +886,12 @@ Convert two vectors to a vector of pairs Returns a vector of tuples, where the i-th tuple contains contains the i-th elements from each of the input vectors. "] -pure fn zip(v: [const T]/&, u: [const U]/&) -> [(T, U)] { - let mut zipped = []; +pure fn zip(v: [const T]/&, u: [const U]/&) -> [(T, U)]/~ { + let mut zipped = []/~; let sz = len(v); let mut i = 0u; assert sz == len(u); - while i < sz { zipped += [(v[i], u[i])]; i += 1u; } + while i < sz { zipped += [(v[i], u[i])]/~; i += 1u; } ret zipped; } @@ -903,12 +904,12 @@ Swaps two elements in a vector * a - The index of the first element * b - The index of the second element "] -fn swap(&&v: [mut T], a: uint, b: uint) { +fn swap(&&v: [mut T]/~, a: uint, b: uint) { v[a] <-> v[b]; } #[doc = "Reverse the order of elements in a vector, in place"] -fn reverse(v: [mut T]) { +fn reverse(v: [mut T]/~) { let mut i: uint = 0u; let ln = len::(v); while i < ln / 2u { v[i] <-> v[ln - i - 1u]; i += 1u; } @@ -916,12 +917,12 @@ fn reverse(v: [mut T]) { #[doc = "Returns a vector with the order of elements reversed"] -fn reversed(v: [const T]/&) -> [T] { - let mut rs: [T] = []; +fn reversed(v: [const T]/&) -> [T]/~ { + let mut rs: [T]/~ = []/~; let mut i = len::(v); if i == 0u { ret rs; } else { i -= 1u; } - while i != 0u { rs += [v[i]]; i -= 1u; } - rs += [v[0]]; + while i != 0u { rs += [v[i]]/~; i -= 1u; } + rs += [v[0]]/~; ret rs; } @@ -1064,28 +1065,28 @@ lexicographically sorted). The total number of permutations produced is `len(v)!`. If `v` contains repeated elements, then some permutations are repeated. "] -pure fn permute(v: [T]/&, put: fn([T])) { +pure fn permute(v: [T]/&, put: fn([T]/~)) { let ln = len(v); if ln == 0u { - put([]); + put([]/~); } else { let mut i = 0u; while i < ln { let elt = v[i]; let rest = slice(v, 0u, i) + slice(v, i+1u, ln); - permute(rest) {|permutation| put([elt] + permutation)} + permute(rest) {|permutation| put([elt]/~ + permutation)} i += 1u; } } } -pure fn windowed(nn: uint, xx: [TT]/&) -> [[TT]] { - let mut ww = []; +pure fn windowed(nn: uint, xx: [TT]/&) -> [[TT]/~]/~ { + let mut ww = []/~; assert 1u <= nn; vec::iteri (xx, {|ii, _x| let len = vec::len(xx); if ii+nn <= len { - ww += [vec::slice(xx, ii, ii+nn)]; + ww += [vec::slice(xx, ii, ii+nn)]/~; } }); ret ww; @@ -1146,16 +1147,16 @@ pure fn unpack_mut_slice(s: [mut T]/&, } } -impl extensions for [T] { +impl extensions for [T]/~ { #[inline(always)] - pure fn +(rhs: [T]/&) -> [T] { + pure fn +(rhs: [T]/&) -> [T]/~ { append(self, rhs) } } -impl extensions for [mut T] { +impl extensions for [mut T]/~ { #[inline(always)] - pure fn +(rhs: [mut T]/&) -> [mut T] { + pure fn +(rhs: [mut T]/&) -> [mut T]/~ { append_mut(self, rhs) } } @@ -1180,7 +1181,7 @@ impl extensions/& for [const T]/& { pure fn head() -> T { head(self) } #[doc = "Returns all but the last elemnt of a vector"] #[inline] - pure fn init() -> [T] { init(self) } + pure fn init() -> [T]/~ { init(self) } #[doc = " Returns the last element of a `v`, failing if the vector is empty. 
"] @@ -1188,10 +1189,10 @@ impl extensions/& for [const T]/& { pure fn last() -> T { last(self) } #[doc = "Returns a copy of the elements from [`start`..`end`) from `v`."] #[inline] - pure fn slice(start: uint, end: uint) -> [T] { slice(self, start, end) } + pure fn slice(start: uint, end: uint) -> [T]/~ { slice(self, start, end) } #[doc = "Returns all but the first element of a vector"] #[inline] - pure fn tail() -> [T] { tail(self) } + pure fn tail() -> [T]/~ { tail(self) } } #[doc = "Extension methods for vectors"] @@ -1259,12 +1260,12 @@ impl extensions/& for [T]/& { Apply a function to each element of a vector and return the results "] #[inline] - pure fn map(f: fn(T) -> U) -> [U] { map(self, f) } + pure fn map(f: fn(T) -> U) -> [U]/~ { map(self, f) } #[doc = " Apply a function to the index and value of each element in the vector and return the results "] - pure fn mapi(f: fn(uint, T) -> U) -> [U] { + pure fn mapi(f: fn(uint, T) -> U) -> [U]/~ { mapi(self, f) } #[doc = "Returns true if the function returns true for all elements. @@ -1278,7 +1279,7 @@ impl extensions/& for [T]/& { of each result vector "] #[inline] - pure fn flat_map(f: fn(T) -> [U]) -> [U] { flat_map(self, f) } + pure fn flat_map(f: fn(T) -> [U]/~) -> [U]/~ { flat_map(self, f) } #[doc = " Apply a function to each element of a vector and return the results @@ -1286,7 +1287,7 @@ impl extensions/& for [T]/& { the resulting vector. "] #[inline] - pure fn filter_map(f: fn(T) -> option) -> [U] { + pure fn filter_map(f: fn(T) -> option) -> [U]/~ { filter_map(self, f) } } @@ -1301,7 +1302,7 @@ impl extensions/& for [T]/& { only those elements for which `f` returned true. "] #[inline] - pure fn filter(f: fn(T) -> bool) -> [T] { filter(self, f) } + pure fn filter(f: fn(T) -> bool) -> [T]/~ { filter(self, f) } #[doc = " Search for the first element that matches a given predicate @@ -1342,7 +1343,7 @@ mod unsafe { * elts - The number of elements in the buffer "] #[inline(always)] - unsafe fn from_buf(ptr: *T, elts: uint) -> [T] { + unsafe fn from_buf(ptr: *T, elts: uint) -> [T]/~ { ret ::unsafe::reinterpret_cast( rustrt::vec_from_buf_shared(sys::get_type_desc::(), ptr as *(), @@ -1357,7 +1358,7 @@ mod unsafe { the vector is actually the specified size. "] #[inline(always)] - unsafe fn set_len(&&v: [const T], new_len: uint) { + unsafe fn set_len(&&v: [const T]/~, new_len: uint) { let repr: **vec_repr = ::unsafe::reinterpret_cast(addr_of(v)); (**repr).fill = new_len * sys::size_of::(); } @@ -1372,7 +1373,7 @@ mod unsafe { would also make any pointers to it invalid. 
"] #[inline(always)] - unsafe fn to_ptr(v: [const T]) -> *T { + unsafe fn to_ptr(v: [const T]/~) -> *T { let repr: **vec_repr = ::unsafe::reinterpret_cast(addr_of(v)); ret ::unsafe::reinterpret_cast(addr_of((**repr).data)); } @@ -1397,7 +1398,7 @@ mod u8 { export hash; #[doc = "Bytewise string comparison"] - pure fn cmp(&&a: [u8], &&b: [u8]) -> int { + pure fn cmp(&&a: [u8]/~, &&b: [u8]/~) -> int { let a_len = len(a); let b_len = len(b); let n = uint::min(a_len, b_len) as libc::size_t; @@ -1418,25 +1419,25 @@ mod u8 { } #[doc = "Bytewise less than or equal"] - pure fn lt(&&a: [u8], &&b: [u8]) -> bool { cmp(a, b) < 0 } + pure fn lt(&&a: [u8]/~, &&b: [u8]/~) -> bool { cmp(a, b) < 0 } #[doc = "Bytewise less than or equal"] - pure fn le(&&a: [u8], &&b: [u8]) -> bool { cmp(a, b) <= 0 } + pure fn le(&&a: [u8]/~, &&b: [u8]/~) -> bool { cmp(a, b) <= 0 } #[doc = "Bytewise equality"] - pure fn eq(&&a: [u8], &&b: [u8]) -> bool { unsafe { cmp(a, b) == 0 } } + pure fn eq(&&a: [u8]/~, &&b: [u8]/~) -> bool { unsafe { cmp(a, b) == 0 } } #[doc = "Bytewise inequality"] - pure fn ne(&&a: [u8], &&b: [u8]) -> bool { unsafe { cmp(a, b) != 0 } } + pure fn ne(&&a: [u8]/~, &&b: [u8]/~) -> bool { unsafe { cmp(a, b) != 0 } } #[doc ="Bytewise greater than or equal"] - pure fn ge(&&a: [u8], &&b: [u8]) -> bool { cmp(a, b) >= 0 } + pure fn ge(&&a: [u8]/~, &&b: [u8]/~) -> bool { cmp(a, b) >= 0 } #[doc = "Bytewise greater than"] - pure fn gt(&&a: [u8], &&b: [u8]) -> bool { cmp(a, b) > 0 } + pure fn gt(&&a: [u8]/~, &&b: [u8]/~) -> bool { cmp(a, b) > 0 } #[doc = "String hash function"] - fn hash(&&s: [u8]) -> uint { + fn hash(&&s: [u8]/~) -> uint { /* Seems to have been tragically copy/pasted from str.rs, or vice versa. But I couldn't figure out how to abstract it out. -- tjc */ @@ -1465,14 +1466,14 @@ impl extensions/& of iter::base_iter for [const A]/& { fn count(x: A) -> uint { iter::count(self, x) } } impl extensions/& for [const A]/& { - fn filter_to_vec(pred: fn(A) -> bool) -> [A] { + fn filter_to_vec(pred: fn(A) -> bool) -> [A]/~ { iter::filter_to_vec(self, pred) } - fn map_to_vec(op: fn(A) -> B) -> [B] { iter::map_to_vec(self, op) } - fn to_vec() -> [A] { iter::to_vec(self) } + fn map_to_vec(op: fn(A) -> B) -> [B]/~ { iter::map_to_vec(self, op) } + fn to_vec() -> [A]/~ { iter::to_vec(self) } // FIXME--bug in resolve prevents this from working (#2611) - // fn flat_map_to_vec>(op: fn(A) -> IB) -> [B] { + // fn flat_map_to_vec>(op: fn(A) -> IB) -> [B]/~ { // iter::flat_map_to_vec(self, op) // } @@ -1504,7 +1505,7 @@ mod tests { fn test_unsafe_ptrs() { unsafe { // Test on-stack copy-from-buf. - let a = [1, 2, 3]; + let a = [1, 2, 3]/~; let mut ptr = unsafe::to_ptr(a); let b = unsafe::from_buf(ptr, 3u); assert (len(b) == 3u); @@ -1513,7 +1514,7 @@ mod tests { assert (b[2] == 3); // Test on-heap copy-from-buf. 
- let c = [1, 2, 3, 4, 5]; + let c = [1, 2, 3, 4, 5]/~; ptr = unsafe::to_ptr(c); let d = unsafe::from_buf(ptr, 5u); assert (len(d) == 5u); @@ -1564,58 +1565,58 @@ mod tests { #[test] fn test_is_empty() { - assert (is_empty::([])); - assert (!is_empty([0])); + assert (is_empty::([]/~)); + assert (!is_empty([0]/~)); } #[test] fn test_is_not_empty() { - assert (is_not_empty([0])); - assert (!is_not_empty::([])); + assert (is_not_empty([0]/~)); + assert (!is_not_empty::([]/~)); } #[test] fn test_head() { - let a = [11, 12]; + let a = [11, 12]/~; assert (head(a) == 11); } #[test] fn test_tail() { - let mut a = [11]; - assert (tail(a) == []); + let mut a = [11]/~; + assert (tail(a) == []/~); - a = [11, 12]; - assert (tail(a) == [12]); + a = [11, 12]/~; + assert (tail(a) == [12]/~); } #[test] fn test_last() { - let mut n = last_opt([]); + let mut n = last_opt([]/~); assert (n == none); - n = last_opt([1, 2, 3]); + n = last_opt([1, 2, 3]/~); assert (n == some(3)); - n = last_opt([1, 2, 3, 4, 5]); + n = last_opt([1, 2, 3, 4, 5]/~); assert (n == some(5)); } #[test] fn test_slice() { // Test on-stack -> on-stack slice. - let mut v = slice([1, 2, 3], 1u, 3u); + let mut v = slice([1, 2, 3]/~, 1u, 3u); assert (len(v) == 2u); assert (v[0] == 2); assert (v[1] == 3); // Test on-heap -> on-stack slice. - v = slice([1, 2, 3, 4, 5], 0u, 3u); + v = slice([1, 2, 3, 4, 5]/~, 0u, 3u); assert (len(v) == 3u); assert (v[0] == 1); assert (v[1] == 2); assert (v[2] == 3); // Test on-heap -> on-heap slice. - v = slice([1, 2, 3, 4, 5, 6], 1u, 6u); + v = slice([1, 2, 3, 4, 5, 6]/~, 1u, 6u); assert (len(v) == 5u); assert (v[0] == 2); assert (v[1] == 3); @@ -1627,7 +1628,7 @@ mod tests { #[test] fn test_pop() { // Test on-stack pop. - let mut v = [1, 2, 3]; + let mut v = [1, 2, 3]/~; let mut e = pop(v); assert (len(v) == 2u); assert (v[0] == 1); @@ -1635,7 +1636,7 @@ mod tests { assert (e == 3); // Test on-heap pop. - v = [1, 2, 3, 4, 5]; + v = [1, 2, 3, 4, 5]/~; e = pop(v); assert (len(v) == 4u); assert (v[0] == 1); @@ -1648,7 +1649,7 @@ mod tests { #[test] fn test_push() { // Test on-stack push(). - let mut v = []; + let mut v = []/~; push(v, 1); assert (len(v) == 1u); assert (v[0] == 1); @@ -1663,7 +1664,7 @@ mod tests { #[test] fn test_grow() { // Test on-stack grow(). - let mut v = []; + let mut v = []/~; grow(v, 2u, 1); assert (len(v) == 2u); assert (v[0] == 1); @@ -1681,7 +1682,7 @@ mod tests { #[test] fn test_grow_fn() { - let mut v = []; + let mut v = []/~; grow_fn(v, 3u, square); assert (len(v) == 3u); assert (v[0] == 0u); @@ -1691,7 +1692,7 @@ mod tests { #[test] fn test_grow_set() { - let mut v = [mut 1, 2, 3]; + let mut v = [mut 1, 2, 3]/~; grow_set(v, 4u, 4, 5); assert (len(v) == 5u); assert (v[0] == 1); @@ -1704,7 +1705,7 @@ mod tests { #[test] fn test_map() { // Test on-stack map. - let mut v = [1u, 2u, 3u]; + let mut v = [1u, 2u, 3u]/~; let mut w = map(v, square_ref); assert (len(w) == 3u); assert (w[0] == 1u); @@ -1712,7 +1713,7 @@ mod tests { assert (w[2] == 9u); // Test on-heap map. 
- v = [1u, 2u, 3u, 4u, 5u]; + v = [1u, 2u, 3u, 4u, 5u]/~; w = map(v, square_ref); assert (len(w) == 5u); assert (w[0] == 1u); @@ -1726,8 +1727,8 @@ mod tests { fn test_map2() { fn times(&&x: int, &&y: int) -> int { ret x * y; } let f = times; - let v0 = [1, 2, 3, 4, 5]; - let v1 = [5, 4, 3, 2, 1]; + let v0 = [1, 2, 3, 4, 5]/~; + let v1 = [5, 4, 3, 2, 1]/~; let u = map2::(v0, v1, f); let mut i = 0; while i < 5 { assert (v0[i] * v1[i] == u[i]); i += 1; } @@ -1736,14 +1737,14 @@ mod tests { #[test] fn test_filter_map() { // Test on-stack filter-map. - let mut v = [1u, 2u, 3u]; + let mut v = [1u, 2u, 3u]/~; let mut w = filter_map(v, square_if_odd); assert (len(w) == 2u); assert (w[0] == 1u); assert (w[1] == 9u); // Test on-heap filter-map. - v = [1u, 2u, 3u, 4u, 5u]; + v = [1u, 2u, 3u, 4u, 5u]/~; w = filter_map(v, square_if_odd); assert (len(w) == 3u); assert (w[0] == 1u); @@ -1756,32 +1757,32 @@ mod tests { } else { ret option::none::; } } fn halve_for_sure(&&i: int) -> int { ret i / 2; } - let all_even: [int] = [0, 2, 8, 6]; - let all_odd1: [int] = [1, 7, 3]; - let all_odd2: [int] = []; - let mix: [int] = [9, 2, 6, 7, 1, 0, 0, 3]; - let mix_dest: [int] = [1, 3, 0, 0]; + let all_even: [int]/~ = [0, 2, 8, 6]/~; + let all_odd1: [int]/~ = [1, 7, 3]/~; + let all_odd2: [int]/~ = []/~; + let mix: [int]/~ = [9, 2, 6, 7, 1, 0, 0, 3]/~; + let mix_dest: [int]/~ = [1, 3, 0, 0]/~; assert (filter_map(all_even, halve) == map(all_even, halve_for_sure)); - assert (filter_map(all_odd1, halve) == []); - assert (filter_map(all_odd2, halve) == []); + assert (filter_map(all_odd1, halve) == []/~); + assert (filter_map(all_odd2, halve) == []/~); assert (filter_map(mix, halve) == mix_dest); } #[test] fn test_filter() { - assert filter([1u, 2u, 3u], is_odd) == [1u, 3u]; - assert filter([1u, 2u, 4u, 8u, 16u], is_three) == []; + assert filter([1u, 2u, 3u]/~, is_odd) == [1u, 3u]/~; + assert filter([1u, 2u, 4u, 8u, 16u]/~, is_three) == []/~; } #[test] fn test_foldl() { // Test on-stack fold. - let mut v = [1u, 2u, 3u]; + let mut v = [1u, 2u, 3u]/~; let mut sum = foldl(0u, v, add); assert (sum == 6u); // Test on-heap fold. 
- v = [1u, 2u, 3u, 4u, 5u]; + v = [1u, 2u, 3u, 4u, 5u]/~; sum = foldl(0u, v, add); assert (sum == 15u); } @@ -1791,7 +1792,7 @@ mod tests { fn sub(&&a: int, &&b: int) -> int { a - b } - let mut v = [1, 2, 3, 4]; + let mut v = [1, 2, 3, 4]/~; let sum = foldl(0, v, sub); assert sum == -10; } @@ -1801,7 +1802,7 @@ mod tests { fn sub(&&a: int, &&b: int) -> int { a - b } - let mut v = [1, 2, 3, 4]; + let mut v = [1, 2, 3, 4]/~; let sum = foldr(v, 0, sub); assert sum == -2; } @@ -1809,21 +1810,21 @@ mod tests { #[test] fn test_iter_empty() { let mut i = 0; - iter::([], { |_v| i += 1 }); + iter::([]/~, { |_v| i += 1 }); assert i == 0; } #[test] fn test_iter_nonempty() { let mut i = 0; - iter([1, 2, 3], { |v| i += v }); + iter([1, 2, 3]/~, { |v| i += v }); assert i == 6; } #[test] fn test_iteri() { let mut i = 0; - iteri([1, 2, 3], { |j, v| + iteri([1, 2, 3]/~, { |j, v| if i == 0 { assert v == 1; } assert j + 1u == v as uint; i += v; @@ -1834,14 +1835,14 @@ mod tests { #[test] fn test_riter_empty() { let mut i = 0; - riter::([], { |_v| i += 1 }); + riter::([]/~, { |_v| i += 1 }); assert i == 0; } #[test] fn test_riter_nonempty() { let mut i = 0; - riter([1, 2, 3], { |v| + riter([1, 2, 3]/~, { |v| if i == 0 { assert v == 3; } i += v }); @@ -1851,7 +1852,7 @@ mod tests { #[test] fn test_riteri() { let mut i = 0; - riteri([0, 1, 2], { |j, v| + riteri([0, 1, 2]/~, { |j, v| if i == 0 { assert v == 2; } assert j == v as uint; i += v; @@ -1861,56 +1862,57 @@ mod tests { #[test] fn test_permute() { - let mut results: [[int]]; + let mut results: [[int]/~]/~; - results = []; - permute([]) {|v| results += [v]; } - assert results == [[]]; + results = []/~; + permute([]/~) {|v| results += [v]/~; } + assert results == [[]/~]/~; - results = []; - permute([7]) {|v| results += [v]; } - assert results == [[7]]; + results = []/~; + permute([7]/~) {|v| results += [v]/~; } + assert results == [[7]/~]/~; - results = []; - permute([1,1]) {|v| results += [v]; } - assert results == [[1,1],[1,1]]; + results = []/~; + permute([1,1]/~) {|v| results += [v]/~; } + assert results == [[1,1]/~,[1,1]/~]/~; - results = []; - permute([5,2,0]) {|v| results += [v]; } - assert results == [[5,2,0],[5,0,2],[2,5,0],[2,0,5],[0,5,2],[0,2,5]]; + results = []/~; + permute([5,2,0]/~) {|v| results += [v]/~; } + assert results == + [[5,2,0]/~,[5,0,2]/~,[2,5,0]/~,[2,0,5]/~,[0,5,2]/~,[0,2,5]/~]/~; } #[test] fn test_any_and_all() { - assert (any([1u, 2u, 3u], is_three)); - assert (!any([0u, 1u, 2u], is_three)); - assert (any([1u, 2u, 3u, 4u, 5u], is_three)); - assert (!any([1u, 2u, 4u, 5u, 6u], is_three)); + assert (any([1u, 2u, 3u]/~, is_three)); + assert (!any([0u, 1u, 2u]/~, is_three)); + assert (any([1u, 2u, 3u, 4u, 5u]/~, is_three)); + assert (!any([1u, 2u, 4u, 5u, 6u]/~, is_three)); - assert (all([3u, 3u, 3u], is_three)); - assert (!all([3u, 3u, 2u], is_three)); - assert (all([3u, 3u, 3u, 3u, 3u], is_three)); - assert (!all([3u, 3u, 0u, 1u, 2u], is_three)); + assert (all([3u, 3u, 3u]/~, is_three)); + assert (!all([3u, 3u, 2u]/~, is_three)); + assert (all([3u, 3u, 3u, 3u, 3u]/~, is_three)); + assert (!all([3u, 3u, 0u, 1u, 2u]/~, is_three)); } #[test] fn test_any2_and_all2() { - assert (any2([2u, 4u, 6u], [2u, 4u, 6u], is_equal)); - assert (any2([1u, 2u, 3u], [4u, 5u, 3u], is_equal)); - assert (!any2([1u, 2u, 3u], [4u, 5u, 6u], is_equal)); - assert (any2([2u, 4u, 6u], [2u, 4u], is_equal)); + assert (any2([2u, 4u, 6u]/~, [2u, 4u, 6u]/~, is_equal)); + assert (any2([1u, 2u, 3u]/~, [4u, 5u, 3u]/~, is_equal)); + assert (!any2([1u, 2u, 3u]/~, [4u, 
5u, 6u]/~, is_equal)); + assert (any2([2u, 4u, 6u]/~, [2u, 4u]/~, is_equal)); - assert (all2([2u, 4u, 6u], [2u, 4u, 6u], is_equal)); - assert (!all2([1u, 2u, 3u], [4u, 5u, 3u], is_equal)); - assert (!all2([1u, 2u, 3u], [4u, 5u, 6u], is_equal)); - assert (!all2([2u, 4u, 6u], [2u, 4u], is_equal)); + assert (all2([2u, 4u, 6u]/~, [2u, 4u, 6u]/~, is_equal)); + assert (!all2([1u, 2u, 3u]/~, [4u, 5u, 3u]/~, is_equal)); + assert (!all2([1u, 2u, 3u]/~, [4u, 5u, 6u]/~, is_equal)); + assert (!all2([2u, 4u, 6u]/~, [2u, 4u]/~, is_equal)); } #[test] fn test_zip_unzip() { - let v1 = [1, 2, 3]; - let v2 = [4, 5, 6]; + let v1 = [1, 2, 3]/~; + let v2 = [4, 5, 6]/~; let z1 = zip(v1, v2); @@ -1927,9 +1929,9 @@ mod tests { #[test] fn test_position_elem() { - assert position_elem([], 1) == none; + assert position_elem([]/~, 1) == none; - let v1 = [1, 2, 3, 3, 2, 5]; + let v1 = [1, 2, 3, 3, 2, 5]/~; assert position_elem(v1, 1) == some(0u); assert position_elem(v1, 2) == some(1u); assert position_elem(v1, 5) == some(5u); @@ -1941,19 +1943,19 @@ mod tests { fn less_than_three(&&i: int) -> bool { ret i < 3; } fn is_eighteen(&&i: int) -> bool { ret i == 18; } - assert position([], less_than_three) == none; + assert position([]/~, less_than_three) == none; - let v1 = [5, 4, 3, 2, 1]; + let v1 = [5, 4, 3, 2, 1]/~; assert position(v1, less_than_three) == some(3u); assert position(v1, is_eighteen) == none; } #[test] fn test_position_between() { - assert position_between([], 0u, 0u, f) == none; + assert position_between([]/~, 0u, 0u, f) == none; fn f(xy: (int, char)) -> bool { let (_x, y) = xy; y == 'b' } - let mut v = [(0, 'a'), (1, 'b'), (2, 'c'), (3, 'b')]; + let mut v = [(0, 'a'), (1, 'b'), (2, 'c'), (3, 'b')]/~; assert position_between(v, 0u, 0u, f) == none; assert position_between(v, 0u, 1u, f) == none; @@ -1978,11 +1980,11 @@ mod tests { #[test] fn test_find() { - assert find([], f) == none; + assert find([]/~, f) == none; fn f(xy: (int, char)) -> bool { let (_x, y) = xy; y == 'b' } fn g(xy: (int, char)) -> bool { let (_x, y) = xy; y == 'd' } - let mut v = [(0, 'a'), (1, 'b'), (2, 'c'), (3, 'b')]; + let mut v = [(0, 'a'), (1, 'b'), (2, 'c'), (3, 'b')]/~; assert find(v, f) == some((1, 'b')); assert find(v, g) == none; @@ -1990,10 +1992,10 @@ mod tests { #[test] fn test_find_between() { - assert find_between([], 0u, 0u, f) == none; + assert find_between([]/~, 0u, 0u, f) == none; fn f(xy: (int, char)) -> bool { let (_x, y) = xy; y == 'b' } - let mut v = [(0, 'a'), (1, 'b'), (2, 'c'), (3, 'b')]; + let mut v = [(0, 'a'), (1, 'b'), (2, 'c'), (3, 'b')]/~; assert find_between(v, 0u, 0u, f) == none; assert find_between(v, 0u, 1u, f) == none; @@ -2018,11 +2020,11 @@ mod tests { #[test] fn test_rposition() { - assert find([], f) == none; + assert find([]/~, f) == none; fn f(xy: (int, char)) -> bool { let (_x, y) = xy; y == 'b' } fn g(xy: (int, char)) -> bool { let (_x, y) = xy; y == 'd' } - let mut v = [(0, 'a'), (1, 'b'), (2, 'c'), (3, 'b')]; + let mut v = [(0, 'a'), (1, 'b'), (2, 'c'), (3, 'b')]/~; assert position(v, f) == some(1u); assert position(v, g) == none; @@ -2030,10 +2032,10 @@ mod tests { #[test] fn test_rposition_between() { - assert rposition_between([], 0u, 0u, f) == none; + assert rposition_between([]/~, 0u, 0u, f) == none; fn f(xy: (int, char)) -> bool { let (_x, y) = xy; y == 'b' } - let mut v = [(0, 'a'), (1, 'b'), (2, 'c'), (3, 'b')]; + let mut v = [(0, 'a'), (1, 'b'), (2, 'c'), (3, 'b')]/~; assert rposition_between(v, 0u, 0u, f) == none; assert rposition_between(v, 0u, 1u, f) == none; @@ -2058,11 
+2060,11 @@ mod tests { #[test] fn test_rfind() { - assert rfind([], f) == none; + assert rfind([]/~, f) == none; fn f(xy: (int, char)) -> bool { let (_x, y) = xy; y == 'b' } fn g(xy: (int, char)) -> bool { let (_x, y) = xy; y == 'd' } - let mut v = [(0, 'a'), (1, 'b'), (2, 'c'), (3, 'b')]; + let mut v = [(0, 'a'), (1, 'b'), (2, 'c'), (3, 'b')]/~; assert rfind(v, f) == some((3, 'b')); assert rfind(v, g) == none; @@ -2070,10 +2072,10 @@ mod tests { #[test] fn test_rfind_between() { - assert rfind_between([], 0u, 0u, f) == none; + assert rfind_between([]/~, 0u, 0u, f) == none; fn f(xy: (int, char)) -> bool { let (_x, y) = xy; y == 'b' } - let mut v = [(0, 'a'), (1, 'b'), (2, 'c'), (3, 'b')]; + let mut v = [(0, 'a'), (1, 'b'), (2, 'c'), (3, 'b')]/~; assert rfind_between(v, 0u, 0u, f) == none; assert rfind_between(v, 0u, 1u, f) == none; @@ -2098,121 +2100,123 @@ mod tests { #[test] fn reverse_and_reversed() { - let v: [mut int] = [mut 10, 20]; + let v: [mut int]/~ = [mut 10, 20]/~; assert (v[0] == 10); assert (v[1] == 20); reverse(v); assert (v[0] == 20); assert (v[1] == 10); - let v2 = reversed::([10, 20]); + let v2 = reversed::([10, 20]/~); assert (v2[0] == 20); assert (v2[1] == 10); v[0] = 30; assert (v2[0] == 20); // Make sure they work with 0-length vectors too. - let v4 = reversed::([]); - assert (v4 == []); - let v3: [mut int] = [mut]; + let v4 = reversed::([]/~); + assert (v4 == []/~); + let v3: [mut int]/~ = [mut]/~; reverse::(v3); } #[test] fn reversed_mut() { - let v2 = reversed::([mut 10, 20]); + let v2 = reversed::([mut 10, 20]/~); assert (v2[0] == 20); assert (v2[1] == 10); } #[test] fn test_init() { - let v = init([1, 2, 3]); - assert v == [1, 2]; + let v = init([1, 2, 3]/~); + assert v == [1, 2]/~; } #[test] fn test_split() { fn f(&&x: int) -> bool { x == 3 } - assert split([], f) == []; - assert split([1, 2], f) == [[1, 2]]; - assert split([3, 1, 2], f) == [[], [1, 2]]; - assert split([1, 2, 3], f) == [[1, 2], []]; - assert split([1, 2, 3, 4, 3, 5], f) == [[1, 2], [4], [5]]; + assert split([]/~, f) == []/~; + assert split([1, 2]/~, f) == [[1, 2]/~]/~; + assert split([3, 1, 2]/~, f) == [[]/~, [1, 2]/~]/~; + assert split([1, 2, 3]/~, f) == [[1, 2]/~, []/~]/~; + assert split([1, 2, 3, 4, 3, 5]/~, f) == [[1, 2]/~, [4]/~, [5]/~]/~; } #[test] fn test_splitn() { fn f(&&x: int) -> bool { x == 3 } - assert splitn([], 1u, f) == []; - assert splitn([1, 2], 1u, f) == [[1, 2]]; - assert splitn([3, 1, 2], 1u, f) == [[], [1, 2]]; - assert splitn([1, 2, 3], 1u, f) == [[1, 2], []]; - assert splitn([1, 2, 3, 4, 3, 5], 1u, f) == [[1, 2], [4, 3, 5]]; + assert splitn([]/~, 1u, f) == []/~; + assert splitn([1, 2]/~, 1u, f) == [[1, 2]/~]/~; + assert splitn([3, 1, 2]/~, 1u, f) == [[]/~, [1, 2]/~]/~; + assert splitn([1, 2, 3]/~, 1u, f) == [[1, 2]/~, []/~]/~; + assert splitn([1, 2, 3, 4, 3, 5]/~, 1u, f) == + [[1, 2]/~, [4, 3, 5]/~]/~; } #[test] fn test_rsplit() { fn f(&&x: int) -> bool { x == 3 } - assert rsplit([], f) == []; - assert rsplit([1, 2], f) == [[1, 2]]; - assert rsplit([1, 2, 3], f) == [[1, 2], []]; - assert rsplit([1, 2, 3, 4, 3, 5], f) == [[1, 2], [4], [5]]; + assert rsplit([]/~, f) == []/~; + assert rsplit([1, 2]/~, f) == [[1, 2]/~]/~; + assert rsplit([1, 2, 3]/~, f) == [[1, 2]/~, []/~]/~; + assert rsplit([1, 2, 3, 4, 3, 5]/~, f) == [[1, 2]/~, [4]/~, [5]/~]/~; } #[test] fn test_rsplitn() { fn f(&&x: int) -> bool { x == 3 } - assert rsplitn([], 1u, f) == []; - assert rsplitn([1, 2], 1u, f) == [[1, 2]]; - assert rsplitn([1, 2, 3], 1u, f) == [[1, 2], []]; - assert rsplitn([1, 2, 3, 4, 3, 5], 
1u, f) == [[1, 2, 3, 4], [5]]; + assert rsplitn([]/~, 1u, f) == []/~; + assert rsplitn([1, 2]/~, 1u, f) == [[1, 2]/~]/~; + assert rsplitn([1, 2, 3]/~, 1u, f) == [[1, 2]/~, []/~]/~; + assert rsplitn([1, 2, 3, 4, 3, 5]/~, 1u, f) == + [[1, 2, 3, 4]/~, [5]/~]/~; } #[test] #[should_fail] #[ignore(cfg(windows))] fn test_init_empty() { - init::([]); + init::([]/~); } #[test] fn test_concat() { - assert concat([[1], [2,3]]) == [1, 2, 3]; + assert concat([[1]/~, [2,3]/~]/~) == [1, 2, 3]/~; } #[test] fn test_connect() { - assert connect([], 0) == []; - assert connect([[1], [2, 3]], 0) == [1, 0, 2, 3]; - assert connect([[1], [2], [3]], 0) == [1, 0, 2, 0, 3]; + assert connect([]/~, 0) == []/~; + assert connect([[1]/~, [2, 3]/~]/~, 0) == [1, 0, 2, 3]/~; + assert connect([[1]/~, [2]/~, [3]/~]/~, 0) == [1, 0, 2, 0, 3]/~; } #[test] fn test_windowed () { - assert [[1u,2u,3u],[2u,3u,4u],[3u,4u,5u],[4u,5u,6u]] - == windowed (3u, [1u,2u,3u,4u,5u,6u]); + assert [[1u,2u,3u]/~,[2u,3u,4u]/~,[3u,4u,5u]/~,[4u,5u,6u]/~]/~ + == windowed (3u, [1u,2u,3u,4u,5u,6u]/~); - assert [[1u,2u,3u,4u],[2u,3u,4u,5u],[3u,4u,5u,6u]] - == windowed (4u, [1u,2u,3u,4u,5u,6u]); + assert [[1u,2u,3u,4u]/~,[2u,3u,4u,5u]/~,[3u,4u,5u,6u]/~]/~ + == windowed (4u, [1u,2u,3u,4u,5u,6u]/~); - assert [] == windowed (7u, [1u,2u,3u,4u,5u,6u]); + assert []/~ == windowed (7u, [1u,2u,3u,4u,5u,6u]/~); } #[test] #[should_fail] #[ignore(cfg(windows))] fn test_windowed_() { - let _x = windowed (0u, [1u,2u,3u,4u,5u,6u]); + let _x = windowed (0u, [1u,2u,3u,4u,5u,6u]/~); } #[test] fn to_mut_no_copy() { unsafe { - let x = [1, 2, 3]; + let x = [1, 2, 3]/~; let addr = unsafe::to_ptr(x); let x_mut = to_mut(x); let addr_mut = unsafe::to_ptr(x_mut); @@ -2223,7 +2227,7 @@ mod tests { #[test] fn from_mut_no_copy() { unsafe { - let x = [mut 1, 2, 3]; + let x = [mut 1, 2, 3]/~; let addr = unsafe::to_ptr(x); let x_imm = from_mut(x); let addr_imm = unsafe::to_ptr(x_imm); @@ -2233,24 +2237,24 @@ mod tests { #[test] fn test_unshift() { - let mut x = [1, 2, 3]; + let mut x = [1, 2, 3]/~; unshift(x, 0); - assert x == [0, 1, 2, 3]; + assert x == [0, 1, 2, 3]/~; } #[test] fn test_capacity() { - let mut v = [0u64]; + let mut v = [0u64]/~; reserve(v, 10u); assert capacity(v) == 10u; - let mut v = [0u32]; + let mut v = [0u32]/~; reserve(v, 10u); assert capacity(v) == 10u; } #[test] fn test_view() { - let v = [1, 2, 3, 4, 5]; + let v = [1, 2, 3, 4, 5]/~; let v = view(v, 1u, 3u); assert(len(v) == 2u); assert(v[0] == 2); diff --git a/src/libstd/arena.rs b/src/libstd/arena.rs index 163deef0640..ede119bb459 100644 --- a/src/libstd/arena.rs +++ b/src/libstd/arena.rs @@ -5,11 +5,11 @@ export arena, arena_with_size; import list; import list::{list, cons, nil}; -type chunk = {data: [u8], mut fill: uint}; +type chunk = {data: [u8]/~, mut fill: uint}; type arena = {mut chunks: @list<@chunk>}; fn chunk(size: uint) -> @chunk { - let mut v = []; + let mut v = []/~; vec::reserve(v, size); @{ data: v, mut fill: 0u } } diff --git a/src/libstd/bitv.rs b/src/libstd/bitv.rs index 254ed4d4ede..2cbc9ae0187 100644 --- a/src/libstd/bitv.rs +++ b/src/libstd/bitv.rs @@ -22,7 +22,7 @@ export eq_vec; // for the case where nbits <= 32. #[doc = "The bitvector type"] -type bitv = @{storage: [mut uint], nbits: uint}; +type bitv = @{storage: [mut uint]/~, nbits: uint}; const uint_bits: uint = 32u + (1u << 32u >> 27u); @@ -183,7 +183,7 @@ Converts the bitvector to a vector of uint with the same length. Each uint in the resulting vector has either value 0u or 1u. 
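
For example (illustrative):

    assert to_vec(bitv(3u, true)) == [1u, 1u, 1u]/~;
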
"] -fn to_vec(v: bitv) -> [uint] { +fn to_vec(v: bitv) -> [uint]/~ { let sub = {|x|init_to_vec(v, x)}; ret vec::from_fn::(v.nbits, sub); } @@ -225,7 +225,7 @@ Compare a bitvector to a vector of uint The uint vector is expected to only contain the values 0u and 1u. Both the bitvector and vector must have the same length "] -fn eq_vec(v0: bitv, v1: [uint]) -> bool { +fn eq_vec(v0: bitv, v1: [uint]/~) -> bool { assert (v0.nbits == vec::len::(v1)); let len = v0.nbits; let mut i = 0u; @@ -262,9 +262,9 @@ mod tests { fn test_1_element() { let mut act; act = bitv(1u, false); - assert (eq_vec(act, [0u])); + assert (eq_vec(act, [0u]/~)); act = bitv(1u, true); - assert (eq_vec(act, [1u])); + assert (eq_vec(act, [1u]/~)); } #[test] @@ -273,11 +273,11 @@ mod tests { // all 0 act = bitv(10u, false); - assert (eq_vec(act, [0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u])); + assert (eq_vec(act, [0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u]/~)); // all 1 act = bitv(10u, true); - assert (eq_vec(act, [1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u])); + assert (eq_vec(act, [1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u]/~)); // mixed act = bitv(10u, false); @@ -286,7 +286,7 @@ mod tests { set(act, 2u, true); set(act, 3u, true); set(act, 4u, true); - assert (eq_vec(act, [1u, 1u, 1u, 1u, 1u, 0u, 0u, 0u, 0u, 0u])); + assert (eq_vec(act, [1u, 1u, 1u, 1u, 1u, 0u, 0u, 0u, 0u, 0u]/~)); // mixed act = bitv(10u, false); @@ -295,7 +295,7 @@ mod tests { set(act, 7u, true); set(act, 8u, true); set(act, 9u, true); - assert (eq_vec(act, [0u, 0u, 0u, 0u, 0u, 1u, 1u, 1u, 1u, 1u])); + assert (eq_vec(act, [0u, 0u, 0u, 0u, 0u, 1u, 1u, 1u, 1u, 1u]/~)); // mixed act = bitv(10u, false); @@ -303,7 +303,7 @@ mod tests { set(act, 3u, true); set(act, 6u, true); set(act, 9u, true); - assert (eq_vec(act, [1u, 0u, 0u, 1u, 0u, 0u, 1u, 0u, 0u, 1u])); + assert (eq_vec(act, [1u, 0u, 0u, 1u, 0u, 0u, 1u, 0u, 0u, 1u]/~)); } #[test] @@ -315,14 +315,14 @@ mod tests { assert (eq_vec(act, [0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, - 0u, 0u, 0u, 0u, 0u])); + 0u, 0u, 0u, 0u, 0u]/~)); // all 1 act = bitv(31u, true); assert (eq_vec(act, [1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, - 1u, 1u, 1u, 1u, 1u])); + 1u, 1u, 1u, 1u, 1u]/~)); // mixed act = bitv(31u, false); @@ -337,7 +337,7 @@ mod tests { assert (eq_vec(act, [1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, - 0u, 0u, 0u, 0u, 0u])); + 0u, 0u, 0u, 0u, 0u]/~)); // mixed act = bitv(31u, false); @@ -352,7 +352,7 @@ mod tests { assert (eq_vec(act, [0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 0u, 0u, - 0u, 0u, 0u, 0u, 0u])); + 0u, 0u, 0u, 0u, 0u]/~)); // mixed act = bitv(31u, false); @@ -366,7 +366,7 @@ mod tests { assert (eq_vec(act, [0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 1u, 1u, - 1u, 1u, 1u, 1u, 1u])); + 1u, 1u, 1u, 1u, 1u]/~)); // mixed act = bitv(31u, false); @@ -376,7 +376,7 @@ mod tests { assert (eq_vec(act, [0u, 0u, 0u, 1u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 1u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, - 0u, 0u, 0u, 0u, 1u])); + 0u, 0u, 0u, 0u, 1u]/~)); } #[test] @@ -388,14 +388,14 @@ mod tests { assert (eq_vec(act, [0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, - 0u, 0u, 0u, 0u, 0u, 0u])); + 0u, 0u, 0u, 0u, 0u, 0u]/~)); // all 1 act = bitv(32u, true); assert (eq_vec(act, 
[1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, - 1u, 1u, 1u, 1u, 1u, 1u])); + 1u, 1u, 1u, 1u, 1u, 1u]/~)); // mixed act = bitv(32u, false); @@ -410,7 +410,7 @@ mod tests { assert (eq_vec(act, [1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, - 0u, 0u, 0u, 0u, 0u, 0u])); + 0u, 0u, 0u, 0u, 0u, 0u]/~)); // mixed act = bitv(32u, false); @@ -425,7 +425,7 @@ mod tests { assert (eq_vec(act, [0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 0u, 0u, - 0u, 0u, 0u, 0u, 0u, 0u])); + 0u, 0u, 0u, 0u, 0u, 0u]/~)); // mixed act = bitv(32u, false); @@ -440,7 +440,7 @@ mod tests { assert (eq_vec(act, [0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 1u, 1u, - 1u, 1u, 1u, 1u, 1u, 1u])); + 1u, 1u, 1u, 1u, 1u, 1u]/~)); // mixed act = bitv(32u, false); @@ -451,7 +451,7 @@ mod tests { assert (eq_vec(act, [0u, 0u, 0u, 1u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 1u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, - 0u, 0u, 0u, 0u, 1u, 1u])); + 0u, 0u, 0u, 0u, 1u, 1u]/~)); } #[test] @@ -463,14 +463,14 @@ mod tests { assert (eq_vec(act, [0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, - 0u, 0u, 0u, 0u, 0u, 0u, 0u])); + 0u, 0u, 0u, 0u, 0u, 0u, 0u]/~)); // all 1 act = bitv(33u, true); assert (eq_vec(act, [1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, - 1u, 1u, 1u, 1u, 1u, 1u, 1u])); + 1u, 1u, 1u, 1u, 1u, 1u, 1u]/~)); // mixed act = bitv(33u, false); @@ -485,7 +485,7 @@ mod tests { assert (eq_vec(act, [1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, - 0u, 0u, 0u, 0u, 0u, 0u, 0u])); + 0u, 0u, 0u, 0u, 0u, 0u, 0u]/~)); // mixed act = bitv(33u, false); @@ -500,7 +500,7 @@ mod tests { assert (eq_vec(act, [0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 0u, 0u, - 0u, 0u, 0u, 0u, 0u, 0u, 0u])); + 0u, 0u, 0u, 0u, 0u, 0u, 0u]/~)); // mixed act = bitv(33u, false); @@ -515,7 +515,7 @@ mod tests { assert (eq_vec(act, [0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 1u, 1u, - 1u, 1u, 1u, 1u, 1u, 1u, 0u])); + 1u, 1u, 1u, 1u, 1u, 1u, 0u]/~)); // mixed act = bitv(33u, false); @@ -527,7 +527,7 @@ mod tests { assert (eq_vec(act, [0u, 0u, 0u, 1u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 1u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, - 0u, 0u, 0u, 0u, 1u, 1u, 1u])); + 0u, 0u, 0u, 0u, 1u, 1u, 1u]/~)); } #[test] diff --git a/src/libstd/deque.rs b/src/libstd/deque.rs index f57d2a21af6..2a9e663a08c 100644 --- a/src/libstd/deque.rs +++ b/src/libstd/deque.rs @@ -24,10 +24,10 @@ fn create() -> t { * Grow is only called on full elts, so nelts is also len(elts), unlike * elsewhere. */ - fn grow(nelts: uint, lo: uint, -elts: [mut cell]) -> - [mut cell] { + fn grow(nelts: uint, lo: uint, -elts: [mut cell]/~) -> + [mut cell]/~ { assert (nelts == vec::len(elts)); - let mut rv = [mut]; + let mut rv = [mut]/~; let mut i = 0u; let nalloc = uint::next_power_of_two(nelts + 1u); diff --git a/src/libstd/ebml.rs b/src/libstd/ebml.rs index d02c498025c..3a7b88ae7a9 100644 --- a/src/libstd/ebml.rs +++ b/src/libstd/ebml.rs @@ -35,7 +35,7 @@ type ebml_state = {ebml_tag: ebml_tag, tag_pos: uint, data_pos: uint}; // modules within this file. 
// ebml reading -type doc = {data: @[u8], start: uint, end: uint}; +type doc = {data: @[u8]/~, start: uint, end: uint}; type tagged_doc = {tag: uint, doc: doc}; @@ -62,11 +62,11 @@ fn vuint_at(data: [u8]/&, start: uint) -> {val: uint, next: uint} { } else { #error("vint too big"); fail; } } -fn doc(data: @[u8]) -> doc { +fn doc(data: @[u8]/~) -> doc { ret {data: data, start: 0u, end: vec::len::(*data)}; } -fn doc_at(data: @[u8], start: uint) -> tagged_doc { +fn doc_at(data: @[u8]/~, start: uint) -> tagged_doc { let elt_tag = vuint_at(*data, start); let elt_size = vuint_at(*data, elt_tag.next); let end = elt_size.next + elt_size.val; @@ -119,7 +119,7 @@ fn tagged_docs(d: doc, tg: uint, it: fn(doc)) { } } -fn doc_data(d: doc) -> [u8] { ret vec::slice::(*d.data, d.start, d.end); } +fn doc_data(d: doc) -> [u8]/~ { vec::slice::(*d.data, d.start, d.end) } fn doc_as_str(d: doc) -> str { ret str::from_bytes(doc_data(d)); } @@ -149,7 +149,7 @@ fn doc_as_i32(d: doc) -> i32 { doc_as_u32(d) as i32 } fn doc_as_i64(d: doc) -> i64 { doc_as_u64(d) as i64 } // ebml writing -type writer = {writer: io::writer, mut size_positions: [uint]}; +type writer = {writer: io::writer, mut size_positions: [uint]/~}; fn write_sized_vuint(w: io::writer, n: uint, size: uint) { alt size { @@ -180,7 +180,7 @@ fn write_vuint(w: io::writer, n: uint) { } fn writer(w: io::writer) -> writer { - let size_positions: [uint] = []; + let size_positions: [uint]/~ = []/~; ret {writer: w, mut size_positions: size_positions}; } diff --git a/src/libstd/getopts.rs b/src/libstd/getopts.rs index a08536f9441..351542111ef 100644 --- a/src/libstd/getopts.rs +++ b/src/libstd/getopts.rs @@ -28,12 +28,12 @@ name following -o, and accepts both -h and --help as optional flags. } fn print_usage(program: str) { - io::println(\"Usage: \" + program + \" [options]\"); + io::println(\"Usage: \" + program + \" [options]/~\"); io::println(\"-o\t\tOutput\"); io::println(\"-h --help\tUsage\"); } - fn main(args: [str]) { + fn main(args: [str]/~) { check vec::is_not_empty(args); let program : str = vec::head(args); @@ -42,7 +42,7 @@ name following -o, and accepts both -h and --help as optional flags. optopt(\"o\"), optflag(\"h\"), optflag(\"help\") - ]; + ]/~; let match = alt getopts(vec::tail(args), opts) { result::ok(m) { m } result::err(f) { fail fail_str(f) } @@ -134,7 +134,7 @@ enum optval { val(str), given, } The result of checking command line arguments. Contains a vector of matches and a vector of free strings. "] -type match = {opts: [opt], vals: [[optval]], free: [str]}; +type match = {opts: [opt]/~, vals: [[optval]/~]/~, free: [str]/~}; fn is_arg(arg: str) -> bool { ret str::len(arg) > 1u && arg[0] == '-' as u8; @@ -144,7 +144,7 @@ fn name_str(nm: name) -> str { ret alt nm { short(ch) { str::from_char(ch) } long(s) { s } }; } -fn find_opt(opts: [opt], nm: name) -> option { +fn find_opt(opts: [opt]/~, nm: name) -> option { vec::position(opts, { |opt| opt.name == nm }) } @@ -188,21 +188,21 @@ On success returns `ok(opt)`. Use functions such as `opt_present` `opt_str`, etc. to interrogate results. Returns `err(fail_)` on failure. Use to get an error message. 
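(`fail_str`, as in the example at the top of this module, turns the failure
value into a printable message.)

For example, a minimal sketch:

    alt getopts([\"--test=20\"]/~, [reqopt(\"test\")]/~) {
      result::ok(m) { assert opt_present(m, \"test\"); }
      result::err(f) { fail fail_str(f); }
    }
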
"] -fn getopts(args: [str], opts: [opt]) -> result unsafe { +fn getopts(args: [str]/~, opts: [opt]/~) -> result unsafe { let n_opts = vec::len::(opts); - fn f(_x: uint) -> [optval] { ret []; } + fn f(_x: uint) -> [optval]/~ { ret []/~; } let vals = vec::to_mut(vec::from_fn(n_opts, f)); - let mut free: [str] = []; + let mut free: [str]/~ = []/~; let l = vec::len(args); let mut i = 0u; while i < l { let cur = args[i]; let curlen = str::len(cur); if !is_arg(cur) { - free += [cur]; + free += [cur]/~; } else if str::eq(cur, "--") { let mut j = i + 1u; - while j < l { free += [args[j]]; j += 1u; } + while j < l { vec::push(free, args[j]); j += 1u; } break; } else { let mut names; @@ -211,19 +211,19 @@ fn getopts(args: [str], opts: [opt]) -> result unsafe { let tail = str::slice(cur, 2u, curlen); let tail_eq = str::splitn_char(tail, '=', 1u); if vec::len(tail_eq) <= 1u { - names = [long(tail)]; + names = [long(tail)]/~; } else { names = - [long(tail_eq[0])]; + [long(tail_eq[0])]/~; i_arg = option::some::(tail_eq[1]); } } else { let mut j = 1u; - names = []; + names = []/~; while j < curlen { let range = str::char_range_at(cur, j); - names += [short(range.ch)]; + names += [short(range.ch)]/~; j = range.next; } } @@ -239,22 +239,23 @@ fn getopts(args: [str], opts: [opt]) -> result unsafe { if !option::is_none::(i_arg) { ret err(unexpected_argument(name_str(nm))); } - vals[optid] += [given]; + vec::push(vals[optid], given); } maybe { if !option::is_none::(i_arg) { - vals[optid] += [val(option::get(i_arg))]; + vec::push(vals[optid], val(option::get(i_arg))); } else if name_pos < vec::len::(names) || i + 1u == l || is_arg(args[i + 1u]) { - vals[optid] += [given]; - } else { i += 1u; vals[optid] += [val(args[i])]; } + vec::push(vals[optid], given); + } else { i += 1u; vec::push(vals[optid], val(args[i])); } } yes { if !option::is_none::(i_arg) { - vals[optid] += [val(option::get::(i_arg))]; + vec::push(vals[optid], + val(option::get::(i_arg))); } else if i + 1u == l { ret err(argument_missing(name_str(nm))); - } else { i += 1u; vals[optid] += [val(args[i])]; } + } else { i += 1u; vec::push(vals[optid], val(args[i])); } } } } @@ -280,7 +281,7 @@ fn getopts(args: [str], opts: [opt]) -> result unsafe { ret ok({opts: opts, vals: vec::from_mut(vals), free: free}); } -fn opt_vals(m: match, nm: str) -> [optval] { +fn opt_vals(m: match, nm: str) -> [optval]/~ { ret alt find_opt(m.opts, mkname(nm)) { some(id) { m.vals[id] } none { #error("No option '%s' defined", nm); fail } @@ -295,7 +296,7 @@ fn opt_present(m: match, nm: str) -> bool { } #[doc = "Returns true if any of several options were matched"] -fn opts_present(m: match, names: [str]) -> bool { +fn opts_present(m: match, names: [str]/~) -> bool { for vec::each(names) {|nm| alt find_opt(m.opts, mkname(nm)) { some(_) { ret true; } @@ -321,7 +322,7 @@ Returns the string argument supplied to one of several matching options Fails if the no option was provided from the given list, or if the no such option took an argument "] -fn opts_str(m: match, names: [str]) -> str { +fn opts_str(m: match, names: [str]/~) -> str { for vec::each(names) {|nm| alt opt_val(m, nm) { val(s) { ret s } @@ -337,10 +338,10 @@ Returns a vector of the arguments provided to all matches of the given option. Used when an option accepts multiple values. 
"] -fn opt_strs(m: match, nm: str) -> [str] { - let mut acc: [str] = []; +fn opt_strs(m: match, nm: str) -> [str]/~ { + let mut acc: [str]/~ = []/~; for vec::each(opt_vals(m, nm)) {|v| - alt v { val(s) { acc += [s]; } _ { } } + alt v { val(s) { acc += [s]/~; } _ { } } } ret acc; } @@ -395,8 +396,8 @@ mod tests { // Tests for reqopt #[test] fn test_reqopt_long() { - let args = ["--test=20"]; - let opts = [reqopt("test")]; + let args = ["--test=20"]/~; + let opts = [reqopt("test")]/~; let rs = getopts(args, opts); alt check rs { ok(m) { @@ -408,8 +409,8 @@ mod tests { #[test] fn test_reqopt_long_missing() { - let args = ["blah"]; - let opts = [reqopt("test")]; + let args = ["blah"]/~; + let opts = [reqopt("test")]/~; let rs = getopts(args, opts); alt rs { err(f) { check_fail_type(f, option_missing_); } @@ -419,8 +420,8 @@ mod tests { #[test] fn test_reqopt_long_no_arg() { - let args = ["--test"]; - let opts = [reqopt("test")]; + let args = ["--test"]/~; + let opts = [reqopt("test")]/~; let rs = getopts(args, opts); alt rs { err(f) { check_fail_type(f, argument_missing_); } @@ -430,8 +431,8 @@ mod tests { #[test] fn test_reqopt_long_multi() { - let args = ["--test=20", "--test=30"]; - let opts = [reqopt("test")]; + let args = ["--test=20", "--test=30"]/~; + let opts = [reqopt("test")]/~; let rs = getopts(args, opts); alt rs { err(f) { check_fail_type(f, option_duplicated_); } @@ -441,8 +442,8 @@ mod tests { #[test] fn test_reqopt_short() { - let args = ["-t", "20"]; - let opts = [reqopt("t")]; + let args = ["-t", "20"]/~; + let opts = [reqopt("t")]/~; let rs = getopts(args, opts); alt rs { ok(m) { @@ -455,8 +456,8 @@ mod tests { #[test] fn test_reqopt_short_missing() { - let args = ["blah"]; - let opts = [reqopt("t")]; + let args = ["blah"]/~; + let opts = [reqopt("t")]/~; let rs = getopts(args, opts); alt rs { err(f) { check_fail_type(f, option_missing_); } @@ -466,8 +467,8 @@ mod tests { #[test] fn test_reqopt_short_no_arg() { - let args = ["-t"]; - let opts = [reqopt("t")]; + let args = ["-t"]/~; + let opts = [reqopt("t")]/~; let rs = getopts(args, opts); alt rs { err(f) { check_fail_type(f, argument_missing_); } @@ -477,8 +478,8 @@ mod tests { #[test] fn test_reqopt_short_multi() { - let args = ["-t", "20", "-t", "30"]; - let opts = [reqopt("t")]; + let args = ["-t", "20", "-t", "30"]/~; + let opts = [reqopt("t")]/~; let rs = getopts(args, opts); alt rs { err(f) { check_fail_type(f, option_duplicated_); } @@ -490,8 +491,8 @@ mod tests { // Tests for optopt #[test] fn test_optopt_long() { - let args = ["--test=20"]; - let opts = [optopt("test")]; + let args = ["--test=20"]/~; + let opts = [optopt("test")]/~; let rs = getopts(args, opts); alt rs { ok(m) { @@ -504,8 +505,8 @@ mod tests { #[test] fn test_optopt_long_missing() { - let args = ["blah"]; - let opts = [optopt("test")]; + let args = ["blah"]/~; + let opts = [optopt("test")]/~; let rs = getopts(args, opts); alt rs { ok(m) { assert (!opt_present(m, "test")); } @@ -515,8 +516,8 @@ mod tests { #[test] fn test_optopt_long_no_arg() { - let args = ["--test"]; - let opts = [optopt("test")]; + let args = ["--test"]/~; + let opts = [optopt("test")]/~; let rs = getopts(args, opts); alt rs { err(f) { check_fail_type(f, argument_missing_); } @@ -526,8 +527,8 @@ mod tests { #[test] fn test_optopt_long_multi() { - let args = ["--test=20", "--test=30"]; - let opts = [optopt("test")]; + let args = ["--test=20", "--test=30"]/~; + let opts = [optopt("test")]/~; let rs = getopts(args, opts); alt rs { err(f) { check_fail_type(f, option_duplicated_); } 
@@ -537,8 +538,8 @@ mod tests { #[test] fn test_optopt_short() { - let args = ["-t", "20"]; - let opts = [optopt("t")]; + let args = ["-t", "20"]/~; + let opts = [optopt("t")]/~; let rs = getopts(args, opts); alt rs { ok(m) { @@ -551,8 +552,8 @@ mod tests { #[test] fn test_optopt_short_missing() { - let args = ["blah"]; - let opts = [optopt("t")]; + let args = ["blah"]/~; + let opts = [optopt("t")]/~; let rs = getopts(args, opts); alt rs { ok(m) { assert (!opt_present(m, "t")); } @@ -562,8 +563,8 @@ mod tests { #[test] fn test_optopt_short_no_arg() { - let args = ["-t"]; - let opts = [optopt("t")]; + let args = ["-t"]/~; + let opts = [optopt("t")]/~; let rs = getopts(args, opts); alt rs { err(f) { check_fail_type(f, argument_missing_); } @@ -573,8 +574,8 @@ mod tests { #[test] fn test_optopt_short_multi() { - let args = ["-t", "20", "-t", "30"]; - let opts = [optopt("t")]; + let args = ["-t", "20", "-t", "30"]/~; + let opts = [optopt("t")]/~; let rs = getopts(args, opts); alt rs { err(f) { check_fail_type(f, option_duplicated_); } @@ -586,8 +587,8 @@ mod tests { // Tests for optflag #[test] fn test_optflag_long() { - let args = ["--test"]; - let opts = [optflag("test")]; + let args = ["--test"]/~; + let opts = [optflag("test")]/~; let rs = getopts(args, opts); alt rs { ok(m) { assert (opt_present(m, "test")); } @@ -597,8 +598,8 @@ mod tests { #[test] fn test_optflag_long_missing() { - let args = ["blah"]; - let opts = [optflag("test")]; + let args = ["blah"]/~; + let opts = [optflag("test")]/~; let rs = getopts(args, opts); alt rs { ok(m) { assert (!opt_present(m, "test")); } @@ -608,8 +609,8 @@ mod tests { #[test] fn test_optflag_long_arg() { - let args = ["--test=20"]; - let opts = [optflag("test")]; + let args = ["--test=20"]/~; + let opts = [optflag("test")]/~; let rs = getopts(args, opts); alt rs { err(f) { @@ -622,8 +623,8 @@ mod tests { #[test] fn test_optflag_long_multi() { - let args = ["--test", "--test"]; - let opts = [optflag("test")]; + let args = ["--test", "--test"]/~; + let opts = [optflag("test")]/~; let rs = getopts(args, opts); alt rs { err(f) { check_fail_type(f, option_duplicated_); } @@ -633,8 +634,8 @@ mod tests { #[test] fn test_optflag_short() { - let args = ["-t"]; - let opts = [optflag("t")]; + let args = ["-t"]/~; + let opts = [optflag("t")]/~; let rs = getopts(args, opts); alt rs { ok(m) { assert (opt_present(m, "t")); } @@ -644,8 +645,8 @@ mod tests { #[test] fn test_optflag_short_missing() { - let args = ["blah"]; - let opts = [optflag("t")]; + let args = ["blah"]/~; + let opts = [optflag("t")]/~; let rs = getopts(args, opts); alt rs { ok(m) { assert (!opt_present(m, "t")); } @@ -655,8 +656,8 @@ mod tests { #[test] fn test_optflag_short_arg() { - let args = ["-t", "20"]; - let opts = [optflag("t")]; + let args = ["-t", "20"]/~; + let opts = [optflag("t")]/~; let rs = getopts(args, opts); alt rs { ok(m) { @@ -670,8 +671,8 @@ mod tests { #[test] fn test_optflag_short_multi() { - let args = ["-t", "-t"]; - let opts = [optflag("t")]; + let args = ["-t", "-t"]/~; + let opts = [optflag("t")]/~; let rs = getopts(args, opts); alt rs { err(f) { check_fail_type(f, option_duplicated_); } @@ -683,8 +684,8 @@ mod tests { // Tests for optmulti #[test] fn test_optmulti_long() { - let args = ["--test=20"]; - let opts = [optmulti("test")]; + let args = ["--test=20"]/~; + let opts = [optmulti("test")]/~; let rs = getopts(args, opts); alt rs { ok(m) { @@ -697,8 +698,8 @@ mod tests { #[test] fn test_optmulti_long_missing() { - let args = ["blah"]; - let opts = 
[optmulti("test")]; + let args = ["blah"]/~; + let opts = [optmulti("test")]/~; let rs = getopts(args, opts); alt rs { ok(m) { assert (!opt_present(m, "test")); } @@ -708,8 +709,8 @@ mod tests { #[test] fn test_optmulti_long_no_arg() { - let args = ["--test"]; - let opts = [optmulti("test")]; + let args = ["--test"]/~; + let opts = [optmulti("test")]/~; let rs = getopts(args, opts); alt rs { err(f) { check_fail_type(f, argument_missing_); } @@ -719,8 +720,8 @@ mod tests { #[test] fn test_optmulti_long_multi() { - let args = ["--test=20", "--test=30"]; - let opts = [optmulti("test")]; + let args = ["--test=20", "--test=30"]/~; + let opts = [optmulti("test")]/~; let rs = getopts(args, opts); alt rs { ok(m) { @@ -735,8 +736,8 @@ mod tests { #[test] fn test_optmulti_short() { - let args = ["-t", "20"]; - let opts = [optmulti("t")]; + let args = ["-t", "20"]/~; + let opts = [optmulti("t")]/~; let rs = getopts(args, opts); alt rs { ok(m) { @@ -749,8 +750,8 @@ mod tests { #[test] fn test_optmulti_short_missing() { - let args = ["blah"]; - let opts = [optmulti("t")]; + let args = ["blah"]/~; + let opts = [optmulti("t")]/~; let rs = getopts(args, opts); alt rs { ok(m) { assert (!opt_present(m, "t")); } @@ -760,8 +761,8 @@ mod tests { #[test] fn test_optmulti_short_no_arg() { - let args = ["-t"]; - let opts = [optmulti("t")]; + let args = ["-t"]/~; + let opts = [optmulti("t")]/~; let rs = getopts(args, opts); alt rs { err(f) { check_fail_type(f, argument_missing_); } @@ -771,8 +772,8 @@ mod tests { #[test] fn test_optmulti_short_multi() { - let args = ["-t", "20", "-t", "30"]; - let opts = [optmulti("t")]; + let args = ["-t", "20", "-t", "30"]/~; + let opts = [optmulti("t")]/~; let rs = getopts(args, opts); alt rs { ok(m) { @@ -787,8 +788,8 @@ mod tests { #[test] fn test_unrecognized_option_long() { - let args = ["--untest"]; - let opts = [optmulti("t")]; + let args = ["--untest"]/~; + let opts = [optmulti("t")]/~; let rs = getopts(args, opts); alt rs { err(f) { check_fail_type(f, unrecognized_option_); } @@ -798,8 +799,8 @@ mod tests { #[test] fn test_unrecognized_option_short() { - let args = ["-t"]; - let opts = [optmulti("test")]; + let args = ["-t"]/~; + let opts = [optmulti("test")]/~; let rs = getopts(args, opts); alt rs { err(f) { check_fail_type(f, unrecognized_option_); } @@ -811,11 +812,11 @@ mod tests { fn test_combined() { let args = ["prog", "free1", "-s", "20", "free2", "--flag", "--long=30", - "-f", "-m", "40", "-m", "50", "-n", "-A B", "-n", "-60 70"]; + "-f", "-m", "40", "-m", "50", "-n", "-A B", "-n", "-60 70"]/~; let opts = [optopt("s"), optflag("flag"), reqopt("long"), optflag("f"), optmulti("m"), optmulti("n"), - optopt("notpresent")]; + optopt("notpresent")]/~; let rs = getopts(args, opts); alt rs { ok(m) { @@ -838,23 +839,23 @@ mod tests { #[test] fn test_multi() { - let args = ["-e", "foo", "--encrypt", "foo"]; - let opts = [optopt("e"), optopt("encrypt")]; + let args = ["-e", "foo", "--encrypt", "foo"]/~; + let opts = [optopt("e"), optopt("encrypt")]/~; let match = alt getopts(args, opts) { result::ok(m) { m } result::err(f) { fail; } }; - assert opts_present(match, ["e"]); - assert opts_present(match, ["encrypt"]); - assert opts_present(match, ["encrypt", "e"]); - assert opts_present(match, ["e", "encrypt"]); - assert !opts_present(match, ["thing"]); - assert !opts_present(match, []); + assert opts_present(match, ["e"]/~); + assert opts_present(match, ["encrypt"]/~); + assert opts_present(match, ["encrypt", "e"]/~); + assert opts_present(match, ["e", "encrypt"]/~); + 
assert !opts_present(match, ["thing"]/~); + assert !opts_present(match, []/~); - assert opts_str(match, ["e"]) == "foo"; - assert opts_str(match, ["encrypt"]) == "foo"; - assert opts_str(match, ["e", "encrypt"]) == "foo"; - assert opts_str(match, ["encrypt", "e"]) == "foo"; + assert opts_str(match, ["e"]/~) == "foo"; + assert opts_str(match, ["encrypt"]/~) == "foo"; + assert opts_str(match, ["e", "encrypt"]/~) == "foo"; + assert opts_str(match, ["encrypt", "e"]/~) == "foo"; } } diff --git a/src/libstd/json.rs b/src/libstd/json.rs index a5a5ac31035..947b037841e 100644 --- a/src/libstd/json.rs +++ b/src/libstd/json.rs @@ -30,7 +30,7 @@ enum json { num(float), string(@str), boolean(bool), - list(@[json]), + list(@[json]/~), dict(map::hashmap), null, } @@ -383,7 +383,7 @@ impl parser for parser { self.bump(); self.parse_whitespace(); - let mut values = []; + let mut values = []/~; if self.ch == ']' { self.bump(); @@ -585,7 +585,7 @@ impl of to_json for @str { impl of to_json for (A, B) { fn to_json() -> json { let (a, b) = self; - list(@[a.to_json(), b.to_json()]) + list(@[a.to_json(), b.to_json()]/~) } } @@ -593,11 +593,11 @@ impl of to_json for (A, B, C) { fn to_json() -> json { let (a, b, c) = self; - list(@[a.to_json(), b.to_json(), c.to_json()]) + list(@[a.to_json(), b.to_json(), c.to_json()]/~) } } -impl of to_json for [A] { +impl of to_json for [A]/~ { fn to_json() -> json { list(@self.map { |elt| elt.to_json() }) } } @@ -632,7 +632,7 @@ impl of to_str::to_str for error { #[cfg(test)] mod tests { - fn mk_dict(items: [(str, json)]) -> json { + fn mk_dict(items: [(str, json)]/~) -> json { let d = map::str_hash(); vec::iter(items) { |item| @@ -670,26 +670,26 @@ mod tests { #[test] fn test_write_list() { - assert to_str(list(@[])) == "[]"; - assert to_str(list(@[boolean(true)])) == "[true]"; + assert to_str(list(@[]/~)) == "[]"; + assert to_str(list(@[boolean(true)]/~)) == "[true]"; assert to_str(list(@[ boolean(false), null, - list(@[string(@"foo\nbar"), num(3.5f)]) - ])) == "[false, null, [\"foo\\nbar\", 3.5]]"; + list(@[string(@"foo\nbar"), num(3.5f)]/~) + ]/~)) == "[false, null, [\"foo\\nbar\", 3.5]]"; } #[test] fn test_write_dict() { - assert to_str(mk_dict([])) == "{}"; - assert to_str(mk_dict([("a", boolean(true))])) == "{ \"a\": true }"; + assert to_str(mk_dict([]/~)) == "{}"; + assert to_str(mk_dict([("a", boolean(true))]/~)) == "{ \"a\": true }"; assert to_str(mk_dict([ ("a", boolean(true)), ("b", list(@[ - mk_dict([("c", string(@"\x0c\r"))]), - mk_dict([("d", string(@""))]) - ])) - ])) == + mk_dict([("c", string(@"\x0c\r"))]/~), + mk_dict([("d", string(@""))]/~) + ]/~)) + ]/~)) == "{ " + "\"a\": true, " + "\"b\": [" + @@ -709,7 +709,7 @@ mod tests { err({line: 1u, col: 6u, msg: @"trailing characters"}); assert from_str("1a") == err({line: 1u, col: 2u, msg: @"trailing characters"}); - assert from_str("[]a") == + assert from_str("[]/~a") == err({line: 1u, col: 3u, msg: @"trailing characters"}); assert from_str("{}a") == err({line: 1u, col: 3u, msg: @"trailing characters"}); @@ -798,15 +798,15 @@ mod tests { assert from_str("[6 7]") == err({line: 1u, col: 4u, msg: @"expecting ',' or ']'"}); - assert from_str("[]") == ok(list(@[])); - assert from_str("[ ]") == ok(list(@[])); - assert from_str("[true]") == ok(list(@[boolean(true)])); - assert from_str("[ false ]") == ok(list(@[boolean(false)])); - assert from_str("[null]") == ok(list(@[null])); - assert from_str("[3, 1]") == ok(list(@[num(3f), num(1f)])); - assert from_str("\n[3, 2]\n") == ok(list(@[num(3f), num(2f)])); + assert 
from_str("[]") == ok(list(@[]/~)); + assert from_str("[ ]") == ok(list(@[]/~)); + assert from_str("[true]") == ok(list(@[boolean(true)]/~)); + assert from_str("[ false ]") == ok(list(@[boolean(false)]/~)); + assert from_str("[null]") == ok(list(@[null]/~)); + assert from_str("[3, 1]") == ok(list(@[num(3f), num(1f)]/~)); + assert from_str("\n[3, 2]\n") == ok(list(@[num(3f), num(2f)]/~)); assert from_str("[2, [4, 1]]") == - ok(list(@[num(2f), list(@[num(4f), num(1f)])])); + ok(list(@[num(2f), list(@[num(4f), num(1f)]/~)]/~)); } #[test] @@ -835,23 +835,23 @@ mod tests { assert from_str("{\"a\":1,") == err({line: 1u, col: 8u, msg: @"EOF while parsing object"}); - assert eq(result::get(from_str("{}")), mk_dict([])); + assert eq(result::get(from_str("{}")), mk_dict([]/~)); assert eq(result::get(from_str("{\"a\": 3}")), - mk_dict([("a", num(3.0f))])); + mk_dict([("a", num(3.0f))]/~)); assert eq(result::get(from_str("{ \"a\": null, \"b\" : true }")), mk_dict([ ("a", null), - ("b", boolean(true))])); + ("b", boolean(true))]/~)); assert eq(result::get(from_str("\n{ \"a\": null, \"b\" : true }\n")), mk_dict([ ("a", null), - ("b", boolean(true))])); + ("b", boolean(true))]/~)); assert eq(result::get(from_str("{\"a\" : 1.0 ,\"b\": [ true ]}")), mk_dict([ ("a", num(1.0)), - ("b", list(@[boolean(true)])) - ])); + ("b", list(@[boolean(true)]/~)) + ]/~)); assert eq(result::get(from_str( "{" + "\"a\": 1.0, " + @@ -867,10 +867,10 @@ mod tests { boolean(true), string(@"foo\nbar"), mk_dict([ - ("c", mk_dict([("d", null)])) - ]) - ])) - ])); + ("c", mk_dict([("d", null)]/~)) + ]/~) + ]/~)) + ]/~)); } #[test] diff --git a/src/libstd/list.rs b/src/libstd/list.rs index 5a468df4c88..ad7828da8c6 100644 --- a/src/libstd/list.rs +++ b/src/libstd/list.rs @@ -10,7 +10,7 @@ enum list { } #[doc = "Create a list from a vector"] -fn from_vec(v: [T]) -> @list { +fn from_vec(v: [T]/~) -> @list { vec::foldr(v, @nil::, { |h, t| @cons(h, t) }) } @@ -135,9 +135,9 @@ mod tests { #[test] fn test_is_empty() { - let empty : @list::list = from_vec([]); - let full1 = from_vec([1]); - let full2 = from_vec(['r', 'u']); + let empty : @list::list = from_vec([]/~); + let full1 = from_vec([1]/~); + let full2 = from_vec(['r', 'u']/~); assert is_empty(empty); assert !is_empty(full1); @@ -150,7 +150,7 @@ mod tests { #[test] fn test_from_vec() { - let l = from_vec([0, 1, 2]); + let l = from_vec([0, 1, 2]/~); assert (head(l) == 0); @@ -163,14 +163,14 @@ mod tests { #[test] fn test_from_vec_empty() { - let empty : @list::list = from_vec([]); + let empty : @list::list = from_vec([]/~); assert (empty == @list::nil::); } #[test] fn test_foldl() { fn add(&&a: uint, &&b: int) -> uint { ret a + (b as uint); } - let l = from_vec([0, 1, 2, 3, 4]); + let l = from_vec([0, 1, 2, 3, 4]/~); let empty = @list::nil::; assert (list::foldl(0u, l, add) == 10u); assert (list::foldl(0u, empty, add) == 0u); @@ -181,21 +181,21 @@ mod tests { fn sub(&&a: int, &&b: int) -> int { a - b } - let l = from_vec([1, 2, 3, 4]); + let l = from_vec([1, 2, 3, 4]/~); assert (list::foldl(0, l, sub) == -10); } #[test] fn test_find_success() { fn match(&&i: int) -> bool { ret i == 2; } - let l = from_vec([0, 1, 2]); + let l = from_vec([0, 1, 2]/~); assert (list::find(l, match) == option::some(2)); } #[test] fn test_find_fail() { fn match(&&_i: int) -> bool { ret false; } - let l = from_vec([0, 1, 2]); + let l = from_vec([0, 1, 2]/~); let empty = @list::nil::; assert (list::find(l, match) == option::none::); assert (list::find(empty, match) == option::none::); @@ -203,7 +203,7 @@ mod 
tests { #[test] fn test_has() { - let l = from_vec([5, 8, 6]); + let l = from_vec([5, 8, 6]/~); let empty = @list::nil::; assert (list::has(l, 5)); assert (!list::has(l, 7)); @@ -213,7 +213,7 @@ mod tests { #[test] fn test_len() { - let l = from_vec([0, 1, 2]); + let l = from_vec([0, 1, 2]/~); let empty = @list::nil::; assert (list::len(l) == 3u); assert (list::len(empty) == 0u); diff --git a/src/libstd/map.rs b/src/libstd/map.rs index c988b167fd0..7d1e7b599a0 100644 --- a/src/libstd/map.rs +++ b/src/libstd/map.rs @@ -86,7 +86,7 @@ mod chained { type t = @{ mut count: uint, - mut chains: [mut chain], + mut chains: [mut chain]/~, hasher: hashfn, eqer: eqfn }; @@ -259,7 +259,7 @@ mod chained { fn each_value(blk: fn(V) -> bool) { self.each { |_k, v| blk(v)} } } - fn chains(nchains: uint) -> [mut chain] { + fn chains(nchains: uint) -> [mut chain]/~ { ret vec::to_mut(vec::from_elem(nchains, absent)); } @@ -299,7 +299,7 @@ fn box_str_hash() -> hashmap<@str, V> { } #[doc = "Construct a hashmap for byte string keys"] -fn bytes_hash() -> hashmap<[u8], V> { +fn bytes_hash() -> hashmap<[u8]/~, V> { ret hashmap(vec::u8::hash, vec::u8::eq); } @@ -323,10 +323,10 @@ fn set_add(set: set, key: K) -> bool { #[doc = " Convert a set into a vector. "] -fn vec_from_set(s: set) -> [T] { - let mut v = []; +fn vec_from_set(s: set) -> [T]/~ { + let mut v = []/~; s.each_key() {|k| - v += [k]; + v += [k]/~; true }; v @@ -334,7 +334,7 @@ fn vec_from_set(s: set) -> [T] { #[doc = "Construct a hashmap from a vector"] fn hash_from_vec(hasher: hashfn, eqer: eqfn, - items: [(K, V)]) -> hashmap { + items: [(K, V)]/~) -> hashmap { let map = hashmap(hasher, eqer); vec::iter(items) { |item| let (key, value) = item; @@ -344,22 +344,22 @@ fn hash_from_vec(hasher: hashfn, eqer: eqfn, } #[doc = "Construct a hashmap from a vector with string keys"] -fn hash_from_strs(items: [(str, V)]) -> hashmap { +fn hash_from_strs(items: [(str, V)]/~) -> hashmap { hash_from_vec(str::hash, str::eq, items) } #[doc = "Construct a hashmap from a vector with byte keys"] -fn hash_from_bytes(items: [([u8], V)]) -> hashmap<[u8], V> { +fn hash_from_bytes(items: [([u8]/~, V)]/~) -> hashmap<[u8]/~, V> { hash_from_vec(vec::u8::hash, vec::u8::eq, items) } #[doc = "Construct a hashmap from a vector with int keys"] -fn hash_from_ints(items: [(int, V)]) -> hashmap { +fn hash_from_ints(items: [(int, V)]/~) -> hashmap { hash_from_vec(int::hash, int::eq, items) } #[doc = "Construct a hashmap from a vector with uint keys"] -fn hash_from_uints(items: [(uint, V)]) -> hashmap { +fn hash_from_uints(items: [(uint, V)]/~) -> hashmap { hash_from_vec(uint::hash, uint::eq, items) } @@ -612,7 +612,7 @@ mod tests { ("a", 1), ("b", 2), ("c", 3) - ]); + ]/~); assert map.size() == 3u; assert map.get("a") == 1; assert map.get("b") == 2; diff --git a/src/libstd/md4.rs b/src/libstd/md4.rs index 8f790c88ae9..08fa6f8597a 100644 --- a/src/libstd/md4.rs +++ b/src/libstd/md4.rs @@ -1,21 +1,21 @@ -fn md4(msg: [u8]) -> {a: u32, b: u32, c: u32, d: u32} { +fn md4(msg: [u8]/~) -> {a: u32, b: u32, c: u32, d: u32} { // subtle: if orig_len is merely uint, then the code below // which performs shifts by 32 bits or more has undefined // results. 
let orig_len: u64 = (vec::len(msg) * 8u) as u64; // pad message - let mut msg = msg + [0x80u8]; + let mut msg = msg + [0x80u8]/~; let mut bitlen = orig_len + 8u64; while (bitlen + 64u64) % 512u64 > 0u64 { - msg += [0u8]; + msg += [0u8]/~; bitlen += 8u64; } // append length let mut i = 0u64; while i < 8u64 { - msg += [(orig_len >> (i * 8u64)) as u8]; + msg += [(orig_len >> (i * 8u64)) as u8]/~; i += 1u64; } @@ -82,7 +82,7 @@ fn md4(msg: [u8]) -> {a: u32, b: u32, c: u32, d: u32} { ret {a: a, b: b, c: c, d: d}; } -fn md4_str(msg: [u8]) -> str { +fn md4_str(msg: [u8]/~) -> str { let {a, b, c, d} = md4(msg); fn app(a: u32, b: u32, c: u32, d: u32, f: fn(u32)) { f(a); f(b); f(c); f(d); diff --git a/src/libstd/net_tcp.rs b/src/libstd/net_tcp.rs index c828566841e..386e869d983 100644 --- a/src/libstd/net_tcp.rs +++ b/src/libstd/net_tcp.rs @@ -113,7 +113,7 @@ fn connect(input_ip: ip::ip_addr, port: uint, closed_signal_ch: comm::chan(closed_signal_po) }; let conn_data_ptr = ptr::addr_of(conn_data); - let reader_po = comm::port::>(); + let reader_po = comm::port::>(); let stream_handle_ptr = malloc_uv_tcp_t(); *(stream_handle_ptr as *mut uv::ll::uv_tcp_t) = uv::ll::tcp_t(); let socket_data = @{ @@ -206,7 +206,7 @@ Write binary data to a tcp stream; Blocks until operation completes # Arguments * sock - a `tcp_socket` to write to -* raw_write_data - a vector of `[u8]` that will be written to the stream. +* raw_write_data - a vector of `[u8]/~` that will be written to the stream. This value must remain valid for the duration of the `write` call # Returns @@ -214,7 +214,7 @@ This value must remain valid for the duration of the `write` call A `result` object with a `nil` value as the `ok` variant, or a `tcp_err_data` value as the `err` variant "] -fn write(sock: tcp_socket, raw_write_data: [u8]) +fn write(sock: tcp_socket, raw_write_data: [u8]/~) -> result::result<(), tcp_err_data> unsafe { let socket_data_ptr = ptr::addr_of(*(sock.socket_data)); write_common_impl(socket_data_ptr, raw_write_data) @@ -238,7 +238,7 @@ Otherwise, use the blocking `tcp::write` function instead. # Arguments * sock - a `tcp_socket` to write to -* raw_write_data - a vector of `[u8]` that will be written to the stream. +* raw_write_data - a vector of `[u8]/~` that will be written to the stream. This value must remain valid for the duration of the `write` call # Returns @@ -247,7 +247,7 @@ A `future` value that, once the `write` operation completes, resolves to a `result` object with a `nil` value as the `ok` variant, or a `tcp_err_data` value as the `err` variant "] -fn write_future(sock: tcp_socket, raw_write_data: [u8]) +fn write_future(sock: tcp_socket, raw_write_data: [u8]/~) -> future::future> unsafe { let socket_data_ptr = ptr::addr_of(*(sock.socket_data)); future::spawn {|| @@ -270,7 +270,7 @@ on) from until `read_stop` is called, or a `tcp_err_data` record "] fn read_start(sock: tcp_socket) -> result::result>, tcp_err_data> unsafe { + result::result<[u8]/~, tcp_err_data>>, tcp_err_data> unsafe { let socket_data = ptr::addr_of(*(sock.socket_data)); read_start_common_impl(socket_data) } @@ -303,13 +303,13 @@ data received. read attempt. 
Pass `0u` to wait indefinitely "] fn read(sock: tcp_socket, timeout_msecs: uint) - -> result::result<[u8],tcp_err_data> { + -> result::result<[u8]/~,tcp_err_data> { let socket_data = ptr::addr_of(*(sock.socket_data)); read_common_impl(socket_data, timeout_msecs) } #[doc=" -Reads a single chunk of data; returns a `future::future<[u8]>` immediately +Reads a single chunk of data; returns a `future::future<[u8]/~>` immediately Does a non-blocking read operation for a single chunk of data from a `tcp_socket` and immediately returns a `future` value representing the @@ -337,7 +337,7 @@ Otherwise, use the blocking `tcp::read` function instead. read attempt. Pass `0u` to wait indefinitely "] fn read_future(sock: tcp_socket, timeout_msecs: uint) - -> future::future> { + -> future::future> { let socket_data = ptr::addr_of(*(sock.socket_data)); future::spawn {|| read_common_impl(socket_data, timeout_msecs) @@ -590,7 +590,7 @@ fn accept(new_conn: tcp_new_connection) new_tcp_conn(server_handle_ptr) { let server_data_ptr = uv::ll::get_data_for_uv_handle( server_handle_ptr) as *tcp_listen_fc_data; - let reader_po = comm::port::>(); + let reader_po = comm::port::>(); let iotask = (*server_data_ptr).iotask; let stream_handle_ptr = malloc_uv_tcp_t(); *(stream_handle_ptr as *mut uv::ll::uv_tcp_t) = uv::ll::tcp_t(); @@ -790,7 +790,7 @@ Convenience methods extending `net::tcp::tcp_socket` "] impl sock_methods for tcp_socket { fn read_start() -> result::result>, tcp_err_data> { + result::result<[u8]/~, tcp_err_data>>, tcp_err_data> { read_start(self) } fn read_stop() -> @@ -798,18 +798,18 @@ impl sock_methods for tcp_socket { read_stop(self) } fn read(timeout_msecs: uint) -> - result::result<[u8], tcp_err_data> { + result::result<[u8]/~, tcp_err_data> { read(self, timeout_msecs) } fn read_future(timeout_msecs: uint) -> - future::future> { + future::future> { read_future(self, timeout_msecs) } - fn write(raw_write_data: [u8]) + fn write(raw_write_data: [u8]/~) -> result::result<(), tcp_err_data> { write(self, raw_write_data) } - fn write_future(raw_write_data: [u8]) + fn write_future(raw_write_data: [u8]/~) -> future::future> { write_future(self, raw_write_data) } @@ -818,7 +818,7 @@ impl sock_methods for tcp_socket { // shared implementation for tcp::read fn read_common_impl(socket_data: *tcp_socket_data, timeout_msecs: uint) - -> result::result<[u8],tcp_err_data> unsafe { + -> result::result<[u8]/~,tcp_err_data> unsafe { log(debug, "starting tcp::read"); let iotask = (*socket_data).iotask; let rs_result = read_start_common_impl(socket_data); @@ -887,7 +887,7 @@ fn read_stop_common_impl(socket_data: *tcp_socket_data) -> // shared impl for read_start fn read_start_common_impl(socket_data: *tcp_socket_data) -> result::result>, tcp_err_data> unsafe { + result::result<[u8]/~, tcp_err_data>>, tcp_err_data> unsafe { let stream_handle_ptr = (*socket_data).stream_handle_ptr; let start_po = comm::port::>(); let start_ch = comm::chan(start_po); @@ -920,14 +920,14 @@ fn read_start_common_impl(socket_data: *tcp_socket_data) // shared implementation used by write and write_future fn write_common_impl(socket_data_ptr: *tcp_socket_data, - raw_write_data: [u8]) + raw_write_data: [u8]/~) -> result::result<(), tcp_err_data> unsafe { let write_req_ptr = ptr::addr_of((*socket_data_ptr).write_req); let stream_handle_ptr = (*socket_data_ptr).stream_handle_ptr; let write_buf_vec = [ uv::ll::buf_init( vec::unsafe::to_ptr(raw_write_data), - vec::len(raw_write_data)) ]; + vec::len(raw_write_data)) ]/~; let write_buf_vec_ptr = 
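// The sock_methods impl above only changes how its [u8]/~ buffers are
// spelled, but a usage sketch may help show the round trip. Everything here
// is hypothetical (the sock value, payload, and timeout are assumptions),
// and error handling is reduced to asserts:

    let example_payload: [u8]/~ = str::bytes("ping");
    let pending_write = sock.write_future(example_payload);
    assert result::is_ok(pending_write.get());
    alt sock.read(1000u) {          // wait up to 1000ms for one chunk
      result::ok(bytes) { log(debug, str::from_bytes(bytes)); }
      result::err(_e) { fail "read error"; }
    }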
ptr::addr_of(write_buf_vec); let result_po = comm::port::(); let write_data = { @@ -968,7 +968,7 @@ fn conn_port_new_tcp_socket( iotask: iotask) -> result::result unsafe { // tcp_nl_on_connection_cb - let reader_po = comm::port::>(); + let reader_po = comm::port::>(); let client_socket_data = @{ reader_po : reader_po, reader_ch : comm::chan(reader_po), @@ -1120,7 +1120,7 @@ enum tcp_read_start_result { } enum tcp_read_result { - tcp_read_data([u8]), + tcp_read_data([u8]/~), tcp_read_done, tcp_read_err(tcp_err_data) } @@ -1264,8 +1264,8 @@ enum conn_attempt { } type tcp_socket_data = { - reader_po: comm::port>, - reader_ch: comm::chan>, + reader_po: comm::port>, + reader_ch: comm::chan>, stream_handle_ptr: *uv::ll::uv_tcp_t, connect_req: uv::ll::uv_connect_t, write_req: uv::ll::uv_write_t, @@ -1570,7 +1570,7 @@ mod test { } } - fn tcp_write_single(sock: tcp_socket, val: [u8]) { + fn tcp_write_single(sock: tcp_socket, val: [u8]/~) { let write_result_future = sock.write_future(val); let write_result = write_result_future.get(); if result::is_err(write_result) { diff --git a/src/libstd/par.rs b/src/libstd/par.rs index 17297d94bd9..156f21216e5 100644 --- a/src/libstd/par.rs +++ b/src/libstd/par.rs @@ -19,22 +19,22 @@ return the intermediate results. This is used to build most of the other parallel vector functions, like map or alli."] fn map_slices( - xs: [A], + xs: [A]/~, f: fn() -> fn~(uint, [A]/&) -> B) - -> [B] { + -> [B]/~ { let len = xs.len(); if len < min_granularity { log(info, "small slice"); // This is a small vector, fall back on the normal map. - [f()(0u, xs)] + [f()(0u, xs)]/~ } else { let num_tasks = uint::min(max_tasks, len / min_granularity); let items_per_task = len / num_tasks; - let mut futures = []; + let mut futures = []/~; let mut base = 0u; log(info, "spawning tasks"); while base < len { @@ -74,18 +74,19 @@ fn map_slices( } #[doc="A parallel version of map."] -fn map(xs: [A], f: fn~(A) -> B) -> [B] { +fn map(xs: [A]/~, f: fn~(A) -> B) -> [B]/~ { vec::concat(map_slices(xs) {|| - fn~(_base: uint, slice : [A]/&, copy f) -> [B] { + fn~(_base: uint, slice : [A]/&, copy f) -> [B]/~ { vec::map(slice, f) } }) } #[doc="A parallel version of mapi."] -fn mapi(xs: [A], f: fn~(uint, A) -> B) -> [B] { +fn mapi(xs: [A]/~, + f: fn~(uint, A) -> B) -> [B]/~ { let slices = map_slices(xs) {|| - fn~(base: uint, slice : [A]/&, copy f) -> [B] { + fn~(base: uint, slice : [A]/&, copy f) -> [B]/~ { vec::mapi(slice) {|i, x| f(i + base, x) } @@ -102,10 +103,10 @@ fn mapi(xs: [A], f: fn~(uint, A) -> B) -> [B] { In this case, f is a function that creates functions to run over the inner elements. 
This is to skirt the need for copy constructors."] fn mapi_factory( - xs: [A], f: fn() -> fn~(uint, A) -> B) -> [B] { + xs: [A]/~, f: fn() -> fn~(uint, A) -> B) -> [B]/~ { let slices = map_slices(xs) {|| let f = f(); - fn~(base: uint, slice : [A]/&, move f) -> [B] { + fn~(base: uint, slice : [A]/&, move f) -> [B]/~ { vec::mapi(slice) {|i, x| f(i + base, x) } @@ -118,7 +119,7 @@ fn mapi_factory( } #[doc="Returns true if the function holds for all elements in the vector."] -fn alli(xs: [A], f: fn~(uint, A) -> bool) -> bool { +fn alli(xs: [A]/~, f: fn~(uint, A) -> bool) -> bool { vec::all(map_slices(xs) {|| fn~(base: uint, slice : [A]/&, copy f) -> bool { vec::alli(slice) {|i, x| @@ -129,7 +130,7 @@ fn alli(xs: [A], f: fn~(uint, A) -> bool) -> bool { } #[doc="Returns true if the function holds for any elements in the vector."] -fn any(xs: [A], f: fn~(A) -> bool) -> bool { +fn any(xs: [A]/~, f: fn~(A) -> bool) -> bool { vec::any(map_slices(xs) {|| fn~(_base : uint, slice: [A]/&, copy f) -> bool { vec::any(slice, f) diff --git a/src/libstd/rope.rs b/src/libstd/rope.rs index a8cd3b65ef8..3799e7cdd28 100644 --- a/src/libstd/rope.rs +++ b/src/libstd/rope.rs @@ -97,7 +97,7 @@ Add one char to the end of the rope * this function executes in near-constant time "] fn append_char(rope: rope, char: char) -> rope { - ret append_str(rope, @str::from_chars([char])); + ret append_str(rope, @str::from_chars([char]/~)); } #[doc = " @@ -118,7 +118,7 @@ Add one char to the beginning of the rope * this function executes in near-constant time "] fn prepend_char(rope: rope, char: char) -> rope { - ret prepend_str(rope, @str::from_chars([char])); + ret prepend_str(rope, @str::from_chars([char]/~)); } #[doc = " @@ -153,7 +153,7 @@ If the ropes are balanced initially and have the same height, the resulting rope remains balanced. However, this function does not take any further measure to ensure that the result is balanced. "] -fn concat(v: [rope]) -> rope { +fn concat(v: [rope]/~) -> rope { //Copy `v` into a mut vector let mut len = vec::len(v); if len == 0u { ret node::empty; } @@ -752,7 +752,7 @@ mod node { * forest - The forest. This vector is progressively rewritten during execution and should be discarded as meaningless afterwards. "] - fn tree_from_forest_destructive(forest: [mut @node]) -> @node { + fn tree_from_forest_destructive(forest: [mut @node]/~) -> @node { let mut i; let mut len = vec::len(forest); while len > 1u { @@ -805,7 +805,7 @@ mod node { option::none { break; } option::some(x) { //TODO: Replace with memcpy or something similar - let mut local_buf: [u8] = + let mut local_buf: [u8]/~ = unsafe::reinterpret_cast(*x.content); let mut i = x.byte_offset; while i < x.byte_len { @@ -859,7 +859,7 @@ mod node { fn bal(node: @node) -> option<@node> { if height(node) < hint_max_node_height { ret option::none; } //1. Gather all leaves as a forest - let mut forest = [mut]; + let mut forest = [mut]/~; let it = leaf_iterator::start(node); loop { alt (leaf_iterator::next(it)) { @@ -1113,12 +1113,12 @@ mod node { mod leaf_iterator { type t = { - stack: [mut @node], + stack: [mut @node]/~, mut stackpos: int }; fn empty() -> t { - let stack : [mut @node] = [mut]; + let stack : [mut @node]/~ = [mut]/~; ret {stack: stack, mut stackpos: -1} } diff --git a/src/libstd/serialization.rs b/src/libstd/serialization.rs index 43059ca7828..5c1d7f00b62 100644 --- a/src/libstd/serialization.rs +++ b/src/libstd/serialization.rs @@ -83,7 +83,7 @@ iface deserializer { // // In some cases, these should eventually be coded as traits. 
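// par::map and par::mapi above differ from their vec:: counterparts only in
// that the input is cut into slices that are mapped on separate tasks, which
// is why the closures are sendable fn~ values. A hypothetical call site in
// the same dialect (xs and the closure are assumptions):

    let xs: [uint]/~ = vec::from_fn(1000u, {|i| i});
    // square every element in parallel
    let squares: [uint]/~ = par::map(xs, fn~(x: uint) -> uint { x * x });
    assert vec::len(squares) == 1000u;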
-fn emit_from_vec(s: S, v: [T], f: fn(T)) { +fn emit_from_vec(s: S, v: [T]/~, f: fn(T)) { s.emit_vec(vec::len(v)) {|| vec::iteri(v) {|i,e| s.emit_vec_elt(i) {|| @@ -93,7 +93,7 @@ fn emit_from_vec(s: S, v: [T], f: fn(T)) { } } -fn read_to_vec(d: D, f: fn() -> T) -> [T] { +fn read_to_vec(d: D, f: fn() -> T) -> [T]/~ { d.read_vec {|len| vec::from_fn(len) {|i| d.read_vec_elt(i) {|| f() } @@ -102,13 +102,13 @@ fn read_to_vec(d: D, f: fn() -> T) -> [T] { } impl serializer_helpers for S { - fn emit_from_vec(v: [T], f: fn(T)) { + fn emit_from_vec(v: [T]/~, f: fn(T)) { emit_from_vec(self, v, f) } } impl deserializer_helpers for D { - fn read_to_vec(f: fn() -> T) -> [T] { + fn read_to_vec(f: fn() -> T) -> [T]/~ { read_to_vec(self, f) } } diff --git a/src/libstd/sha1.rs b/src/libstd/sha1.rs index 2325a9cd3fb..a492facb1a3 100644 --- a/src/libstd/sha1.rs +++ b/src/libstd/sha1.rs @@ -22,14 +22,14 @@ export sha1; #[doc = "The SHA-1 interface"] iface sha1 { #[doc = "Provide message input as bytes"] - fn input([u8]); + fn input([u8]/~); #[doc = "Provide message input as string"] fn input_str(str); #[doc = " Read the digest as a vector of 20 bytes. After calling this no further input may be provided until reset is called. "] - fn result() -> [u8]; + fn result() -> [u8]/~; #[doc = " Read the digest as a hex string. After calling this no further input may be provided until reset is called. @@ -52,15 +52,15 @@ const k3: u32 = 0xCA62C1D6u32; #[doc = "Construct a `sha` object"] fn sha1() -> sha1 { type sha1state = - {h: [mut u32], + {h: [mut u32]/~, mut len_low: u32, mut len_high: u32, - msg_block: [mut u8], + msg_block: [mut u8]/~, mut msg_block_idx: uint, mut computed: bool, - work_buf: @[mut u32]}; + work_buf: @[mut u32]/~}; - fn add_input(st: sha1state, msg: [u8]) { + fn add_input(st: sha1state, msg: [u8]/~) { /* FIXME: Should be typestate precondition (#2345) */ assert (!st.computed); for vec::each(msg) {|element| @@ -157,15 +157,15 @@ fn sha1() -> sha1 { fn circular_shift(bits: u32, word: u32) -> u32 { ret word << bits | word >> 32u32 - bits; } - fn mk_result(st: sha1state) -> [u8] { + fn mk_result(st: sha1state) -> [u8]/~ { if !st.computed { pad_msg(st); st.computed = true; } - let mut rs: [u8] = []; + let mut rs: [u8]/~ = []/~; for vec::each(st.h) {|hpart| let a = (hpart >> 24u32 & 0xFFu32) as u8; let b = (hpart >> 16u32 & 0xFFu32) as u8; let c = (hpart >> 8u32 & 0xFFu32) as u8; let d = (hpart & 0xFFu32) as u8; - rs += [a, b, c, d]; + rs += [a, b, c, d]/~; } ret rs; } @@ -231,9 +231,9 @@ fn sha1() -> sha1 { self.h[4] = 0xC3D2E1F0u32; self.computed = false; } - fn input(msg: [u8]) { add_input(self, msg); } + fn input(msg: [u8]/~) { add_input(self, msg); } fn input_str(msg: str) { add_input(self, str::bytes(msg)); } - fn result() -> [u8] { ret mk_result(self); } + fn result() -> [u8]/~ { ret mk_result(self); } fn result_str() -> str { let r = mk_result(self); let mut s = ""; @@ -260,7 +260,7 @@ mod tests { #[test] fn test() unsafe { - type test = {input: str, output: [u8]}; + type test = {input: str, output: [u8]/~}; fn a_million_letter_a() -> str { let mut i = 0; @@ -270,14 +270,14 @@ mod tests { } // Test messages from FIPS 180-1 - let fips_180_1_tests: [test] = + let fips_180_1_tests: [test]/~ = [{input: "abc", output: [0xA9u8, 0x99u8, 0x3Eu8, 0x36u8, 0x47u8, 0x06u8, 0x81u8, 0x6Au8, 0xBAu8, 0x3Eu8, 0x25u8, 0x71u8, 0x78u8, 0x50u8, 0xC2u8, 0x6Cu8, - 0x9Cu8, 0xD0u8, 0xD8u8, 0x9Du8]}, + 0x9Cu8, 0xD0u8, 0xD8u8, 0x9Du8]/~}, {input: "abcdbcdecdefdefgefghfghighij" + "hijkijkljklmklmnlmnomnopnopq", @@ -286,33 
+286,33 @@ mod tests { 0x1Cu8, 0x3Bu8, 0xD2u8, 0x6Eu8, 0xBAu8, 0xAEu8, 0x4Au8, 0xA1u8, 0xF9u8, 0x51u8, 0x29u8, 0xE5u8, - 0xE5u8, 0x46u8, 0x70u8, 0xF1u8]}, + 0xE5u8, 0x46u8, 0x70u8, 0xF1u8]/~}, {input: a_million_letter_a(), output: [0x34u8, 0xAAu8, 0x97u8, 0x3Cu8, 0xD4u8, 0xC4u8, 0xDAu8, 0xA4u8, 0xF6u8, 0x1Eu8, 0xEBu8, 0x2Bu8, 0xDBu8, 0xADu8, 0x27u8, 0x31u8, - 0x65u8, 0x34u8, 0x01u8, 0x6Fu8]}]; + 0x65u8, 0x34u8, 0x01u8, 0x6Fu8]/~}]/~; // Examples from wikipedia - let wikipedia_tests: [test] = + let wikipedia_tests: [test]/~ = [{input: "The quick brown fox jumps over the lazy dog", output: [0x2fu8, 0xd4u8, 0xe1u8, 0xc6u8, 0x7au8, 0x2du8, 0x28u8, 0xfcu8, 0xedu8, 0x84u8, 0x9eu8, 0xe1u8, 0xbbu8, 0x76u8, 0xe7u8, 0x39u8, - 0x1bu8, 0x93u8, 0xebu8, 0x12u8]}, + 0x1bu8, 0x93u8, 0xebu8, 0x12u8]/~}, {input: "The quick brown fox jumps over the lazy cog", output: [0xdeu8, 0x9fu8, 0x2cu8, 0x7fu8, 0xd2u8, 0x5eu8, 0x1bu8, 0x3au8, 0xfau8, 0xd3u8, 0xe8u8, 0x5au8, 0x0bu8, 0xd1u8, 0x7du8, 0x9bu8, - 0x10u8, 0x0du8, 0xb4u8, 0xb3u8]}]; + 0x10u8, 0x0du8, 0xb4u8, 0xb3u8]/~}]/~; let tests = fips_180_1_tests + wikipedia_tests; - fn check_vec_eq(v0: [u8], v1: [u8]) { + fn check_vec_eq(v0: [u8]/~, v1: [u8]/~) { assert (vec::len::(v0) == vec::len::(v1)); let len = vec::len::(v0); let mut i = 0u; diff --git a/src/libstd/sort.rs b/src/libstd/sort.rs index 5c5a6e09d2f..e3ab3b09707 100644 --- a/src/libstd/sort.rs +++ b/src/libstd/sort.rs @@ -1,5 +1,5 @@ #[doc = "Sorting methods"]; -import vec::len; +import vec::{len, push}; import int::{eq, ord}; export le; @@ -15,18 +15,19 @@ Merge sort. Returns a new vector containing the sorted list. Has worst case O(n log n) performance, best case O(n), but is not space efficient. This is a stable sort. "] -fn merge_sort(le: le, v: [const T]) -> [T] { +fn merge_sort(le: le, v: [const T]/~) -> [T]/~ { type slice = (uint, uint); ret merge_sort_(le, v, (0u, len(v))); - fn merge_sort_(le: le, v: [const T], slice: slice) -> [T] { + fn merge_sort_(le: le, v: [const T]/~, slice: slice) + -> [T]/~ { let begin = tuple::first(slice); let end = tuple::second(slice); let v_len = end - begin; - if v_len == 0u { ret []; } - if v_len == 1u { ret [v[begin]]; } + if v_len == 0u { ret []/~; } + if v_len == 1u { ret [v[begin]]/~; } let mid = v_len / 2u + begin; let a = (begin, mid); @@ -34,8 +35,8 @@ fn merge_sort(le: le, v: [const T]) -> [T] { ret merge(le, merge_sort_(le, v, a), merge_sort_(le, v, b)); } - fn merge(le: le, a: [T], b: [T]) -> [T] { - let mut rs = []; + fn merge(le: le, a: [T]/~, b: [T]/~) -> [T]/~ { + let mut rs = []/~; vec::reserve(rs, len(a) + len(b)); let a_len = len(a); let mut a_ix = 0u; @@ -53,7 +54,7 @@ fn merge_sort(le: le, v: [const T]) -> [T] { } } -fn part(compare_func: le, arr: [mut T], left: uint, +fn part(compare_func: le, arr: [mut T]/~, left: uint, right: uint, pivot: uint) -> uint { let pivot_value = arr[pivot]; arr[pivot] <-> arr[right]; @@ -70,7 +71,7 @@ fn part(compare_func: le, arr: [mut T], left: uint, ret storage_index; } -fn qsort(compare_func: le, arr: [mut T], left: uint, +fn qsort(compare_func: le, arr: [mut T]/~, left: uint, right: uint) { if right > left { let pivot = (left + right) / 2u; @@ -89,13 +90,13 @@ Quicksort. Sorts a mut vector in place. Has worst case O(n^2) performance, average case O(n log n). This is an unstable sort. 
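// The merge_sort change above is again purely in the vector spellings; its
// calling convention is unchanged. A small sketch mirroring the module's own
// tests (le and the input vector are illustrative):

    fn le(&&a: int, &&b: int) -> bool { ret a <= b; }
    let sorted = sort::merge_sort(le, [3, 1, 2]/~);
    assert sorted == [1, 2, 3]/~;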
"] -fn quick_sort(compare_func: le, arr: [mut T]) { +fn quick_sort(compare_func: le, arr: [mut T]/~) { if len::(arr) == 0u { ret; } qsort::(compare_func, arr, 0u, len::(arr) - 1u); } fn qsort3(compare_func_lt: le, compare_func_eq: le, - arr: [mut T], left: int, right: int) { + arr: [mut T]/~, left: int, right: int) { if right <= left { ret; } let v: T = arr[right]; let mut i: int = left - 1; @@ -145,14 +146,14 @@ fn qsort3(compare_func_lt: le, compare_func_eq: le, #[doc = " Fancy quicksort. Sorts a mut vector in place. -Based on algorithm presented by [Sedgewick and Bentley] +Based on algorithm presented by [Sedgewick and Bentley]/~ (http://www.cs.princeton.edu/~rs/talks/QuicksortIsOptimal.pdf). According to these slides this is the algorithm of choice for 'randomly ordered keys, abstract compare' & 'small number of key values'. This is an unstable sort. "] -fn quick_sort3(arr: [mut T]) { +fn quick_sort3(arr: [mut T]/~) { if len::(arr) == 0u { ret; } qsort3::({ |x, y| x.lt(y) }, { |x, y| x.eq(y) }, arr, 0, (len::(arr) as int) - 1); @@ -160,7 +161,7 @@ fn quick_sort3(arr: [mut T]) { #[cfg(test)] mod test_qsort3 { - fn check_sort(v1: [mut int], v2: [mut int]) { + fn check_sort(v1: [mut int]/~, v2: [mut int]/~) { let len = vec::len::(v1); quick_sort3::(v1); let mut i = 0u; @@ -174,24 +175,24 @@ mod test_qsort3 { #[test] fn test() { { - let v1 = [mut 3, 7, 4, 5, 2, 9, 5, 8]; - let v2 = [mut 2, 3, 4, 5, 5, 7, 8, 9]; + let v1 = [mut 3, 7, 4, 5, 2, 9, 5, 8]/~; + let v2 = [mut 2, 3, 4, 5, 5, 7, 8, 9]/~; check_sort(v1, v2); } { - let v1 = [mut 1, 1, 1]; - let v2 = [mut 1, 1, 1]; + let v1 = [mut 1, 1, 1]/~; + let v2 = [mut 1, 1, 1]/~; check_sort(v1, v2); } { - let v1: [mut int] = [mut]; - let v2: [mut int] = [mut]; + let v1: [mut int]/~ = [mut]/~; + let v2: [mut int]/~ = [mut]/~; check_sort(v1, v2); } - { let v1 = [mut 9]; let v2 = [mut 9]; check_sort(v1, v2); } + { let v1 = [mut 9]/~; let v2 = [mut 9]/~; check_sort(v1, v2); } { - let v1 = [mut 9, 3, 3, 3, 9]; - let v2 = [mut 3, 3, 3, 9, 9]; + let v1 = [mut 9, 3, 3, 3, 9]/~; + let v2 = [mut 3, 3, 3, 9, 9]/~; check_sort(v1, v2); } } @@ -199,7 +200,7 @@ mod test_qsort3 { #[cfg(test)] mod test_qsort { - fn check_sort(v1: [mut int], v2: [mut int]) { + fn check_sort(v1: [mut int]/~, v2: [mut int]/~) { let len = vec::len::(v1); fn leual(&&a: int, &&b: int) -> bool { ret a <= b; } let f = leual; @@ -215,24 +216,24 @@ mod test_qsort { #[test] fn test() { { - let v1 = [mut 3, 7, 4, 5, 2, 9, 5, 8]; - let v2 = [mut 2, 3, 4, 5, 5, 7, 8, 9]; + let v1 = [mut 3, 7, 4, 5, 2, 9, 5, 8]/~; + let v2 = [mut 2, 3, 4, 5, 5, 7, 8, 9]/~; check_sort(v1, v2); } { - let v1 = [mut 1, 1, 1]; - let v2 = [mut 1, 1, 1]; + let v1 = [mut 1, 1, 1]/~; + let v2 = [mut 1, 1, 1]/~; check_sort(v1, v2); } { - let v1: [mut int] = [mut]; - let v2: [mut int] = [mut]; + let v1: [mut int]/~ = [mut]/~; + let v2: [mut int]/~ = [mut]/~; check_sort(v1, v2); } - { let v1 = [mut 9]; let v2 = [mut 9]; check_sort(v1, v2); } + { let v1 = [mut 9]/~; let v2 = [mut 9]/~; check_sort(v1, v2); } { - let v1 = [mut 9, 3, 3, 3, 9]; - let v2 = [mut 3, 3, 3, 9, 9]; + let v1 = [mut 9, 3, 3, 3, 9]/~; + let v2 = [mut 3, 3, 3, 9, 9]/~; check_sort(v1, v2); } } @@ -240,9 +241,9 @@ mod test_qsort { // Regression test for #750 #[test] fn test_simple() { - let names = [mut 2, 1, 3]; + let names = [mut 2, 1, 3]/~; - let expected = [1, 2, 3]; + let expected = [1, 2, 3]/~; fn le(&&a: int, &&b: int) -> bool { int::le(a, b) } sort::quick_sort(le, names); @@ -261,7 +262,7 @@ mod test_qsort { #[cfg(test)] mod tests { - fn 
check_sort(v1: [int], v2: [int]) { + fn check_sort(v1: [int]/~, v2: [int]/~) { let len = vec::len::(v1); fn le(&&a: int, &&b: int) -> bool { ret a <= b; } let f = le; @@ -277,16 +278,16 @@ mod tests { #[test] fn test() { { - let v1 = [3, 7, 4, 5, 2, 9, 5, 8]; - let v2 = [2, 3, 4, 5, 5, 7, 8, 9]; + let v1 = [3, 7, 4, 5, 2, 9, 5, 8]/~; + let v2 = [2, 3, 4, 5, 5, 7, 8, 9]/~; check_sort(v1, v2); } - { let v1 = [1, 1, 1]; let v2 = [1, 1, 1]; check_sort(v1, v2); } - { let v1: [int] = []; let v2: [int] = []; check_sort(v1, v2); } - { let v1 = [9]; let v2 = [9]; check_sort(v1, v2); } + { let v1 = [1, 1, 1]/~; let v2 = [1, 1, 1]/~; check_sort(v1, v2); } + { let v1:[int]/~ = []/~; let v2:[int]/~ = []/~; check_sort(v1, v2); } + { let v1 = [9]/~; let v2 = [9]/~; check_sort(v1, v2); } { - let v1 = [9, 3, 3, 3, 9]; - let v2 = [3, 3, 3, 9, 9]; + let v1 = [9, 3, 3, 3, 9]/~; + let v2 = [3, 3, 3, 9, 9]/~; check_sort(v1, v2); } } @@ -294,9 +295,9 @@ mod tests { #[test] fn test_merge_sort_mutable() { fn le(&&a: int, &&b: int) -> bool { ret a <= b; } - let v1 = [mut 3, 2, 1]; + let v1 = [mut 3, 2, 1]/~; let v2 = merge_sort(le, v1); - assert v2 == [1, 2, 3]; + assert v2 == [1, 2, 3]/~; } } diff --git a/src/libstd/term.rs b/src/libstd/term.rs index 3bbde56693a..fdbdc7205da 100644 --- a/src/libstd/term.rs +++ b/src/libstd/term.rs @@ -23,18 +23,18 @@ const color_bright_magenta: u8 = 13u8; const color_bright_cyan: u8 = 14u8; const color_bright_white: u8 = 15u8; -fn esc(writer: io::writer) { writer.write([0x1bu8, '[' as u8]); } +fn esc(writer: io::writer) { writer.write([0x1bu8, '[' as u8]/~); } #[doc = "Reset the foreground and background colors to default"] fn reset(writer: io::writer) { esc(writer); - writer.write(['0' as u8, 'm' as u8]); + writer.write(['0' as u8, 'm' as u8]/~); } #[doc = "Returns true if the terminal supports color"] fn color_supported() -> bool { let supported_terms = ["xterm-color", "xterm", - "screen-bce", "xterm-256color"]; + "screen-bce", "xterm-256color"]/~; ret alt os::getenv("TERM") { option::some(env) { for vec::each(supported_terms) {|term| @@ -50,8 +50,8 @@ fn set_color(writer: io::writer, first_char: u8, color: u8) { assert (color < 16u8); esc(writer); let mut color = color; - if color >= 8u8 { writer.write(['1' as u8, ';' as u8]); color -= 8u8; } - writer.write([first_char, ('0' as u8) + color, 'm' as u8]); + if color >= 8u8 { writer.write(['1' as u8, ';' as u8]/~); color -= 8u8; } + writer.write([first_char, ('0' as u8) + color, 'm' as u8]/~); } #[doc = "Set the foreground color"] diff --git a/src/libstd/test.rs b/src/libstd/test.rs index 01b413054fb..beefdd06385 100644 --- a/src/libstd/test.rs +++ b/src/libstd/test.rs @@ -49,7 +49,7 @@ type test_desc = { // The default console test runner. It accepts the command line // arguments and a vector of test_descs (generated at compile time). 
-fn test_main(args: [str], tests: [test_desc]) { +fn test_main(args: [str]/~, tests: [test_desc]/~) { let opts = alt parse_opts(args) { either::left(o) { o } @@ -64,9 +64,9 @@ type test_opts = {filter: option, run_ignored: bool, type opt_res = either; // Parses command line arguments into test options -fn parse_opts(args: [str]) -> opt_res { +fn parse_opts(args: [str]/~) -> opt_res { let args_ = vec::tail(args); - let opts = [getopts::optflag("ignored"), getopts::optopt("logfile")]; + let opts = [getopts::optflag("ignored"), getopts::optopt("logfile")]/~; let match = alt getopts::getopts(args_, opts) { ok(m) { m } @@ -97,11 +97,11 @@ type console_test_state = mut passed: uint, mut failed: uint, mut ignored: uint, - mut failures: [test_desc]}; + mut failures: [test_desc]/~}; // A simple console test runner fn run_tests_console(opts: test_opts, - tests: [test_desc]) -> bool { + tests: [test_desc]/~) -> bool { fn callback(event: testevent, st: console_test_state) { alt event { @@ -128,7 +128,7 @@ fn run_tests_console(opts: test_opts, st.failed += 1u; write_failed(st.out, st.use_color); st.out.write_line(""); - st.failures += [copy test]; + st.failures += [copy test]/~; } tr_ignored { st.ignored += 1u; @@ -142,7 +142,7 @@ fn run_tests_console(opts: test_opts, let log_out = alt opts.logfile { some(path) { - alt io::file_writer(path, [io::create, io::truncate]) { + alt io::file_writer(path, [io::create, io::truncate]/~) { result::ok(w) { some(w) } result::err(s) { fail(#fmt("can't open output file: %s", s)) @@ -160,7 +160,7 @@ fn run_tests_console(opts: test_opts, mut passed: 0u, mut failed: 0u, mut ignored: 0u, - mut failures: []}; + mut failures: []/~}; run_tests(opts, tests, {|x|callback(x, st)}); @@ -250,7 +250,7 @@ fn should_sort_failures_before_printing_them() { mut passed: 0u, mut failed: 0u, mut ignored: 0u, - mut failures: [test_b, test_a]}; + mut failures: [test_b, test_a]/~}; print_failures(st); @@ -264,14 +264,14 @@ fn should_sort_failures_before_printing_them() { fn use_color() -> bool { ret get_concurrency() == 1u; } enum testevent { - te_filtered([test_desc]), + te_filtered([test_desc]/~), te_wait(test_desc), te_result(test_desc, test_result), } type monitor_msg = (test_desc, test_result); -fn run_tests(opts: test_opts, tests: [test_desc], +fn run_tests(opts: test_opts, tests: [test_desc]/~, callback: fn@(testevent)) { let mut filtered_tests = filter_tests(opts, tests); @@ -329,7 +329,7 @@ fn get_concurrency() -> uint { #[warn(no_non_implicitly_copyable_typarams)] fn filter_tests(opts: test_opts, - tests: [test_desc]) -> [test_desc] { + tests: [test_desc]/~) -> [test_desc]/~ { let mut filtered = copy tests; // Remove tests that don't match the test filter @@ -482,7 +482,7 @@ mod tests { #[test] fn first_free_arg_should_be_a_filter() { - let args = ["progname", "filter"]; + let args = ["progname", "filter"]/~; let opts = alt parse_opts(args) { either::left(o) { o } _ { fail "Malformed arg in first_free_arg_should_be_a_filter"; } }; assert (str::eq("filter", option::get(opts.filter))); @@ -490,7 +490,7 @@ mod tests { #[test] fn parse_ignored_flag() { - let args = ["progname", "filter", "--ignored"]; + let args = ["progname", "filter", "--ignored"]/~; let opts = alt parse_opts(args) { either::left(o) { o } _ { fail "Malformed arg in parse_ignored_flag"; } }; assert (opts.run_ignored); @@ -505,7 +505,7 @@ mod tests { logfile: option::none}; let tests = [{name: "1", fn: fn~() { }, ignore: true, should_fail: false}, - {name: "2", fn: fn~() { }, ignore: false, should_fail: false}]; + 
{name: "2", fn: fn~() { }, ignore: false, should_fail: false}]/~; let filtered = filter_tests(opts, tests); assert (vec::len(filtered) == 1u); @@ -524,15 +524,15 @@ mod tests { "test::ignored_tests_result_in_ignored", "test::first_free_arg_should_be_a_filter", "test::parse_ignored_flag", "test::filter_for_ignored_option", - "test::sort_tests"]; + "test::sort_tests"]/~; let tests = { let testfn = fn~() { }; - let mut tests = []; + let mut tests = []/~; for vec::each(names) {|name| let test = {name: name, fn: copy testfn, ignore: false, should_fail: false}; - tests += [test]; + tests += [test]/~; } tests }; @@ -543,7 +543,7 @@ mod tests { "test::do_not_run_ignored_tests", "test::filter_for_ignored_option", "test::first_free_arg_should_be_a_filter", "test::ignored_tests_result_in_ignored", "test::parse_ignored_flag", - "test::sort_tests"]; + "test::sort_tests"]/~; let pairs = vec::zip(expected, filtered); diff --git a/src/libstd/time.rs b/src/libstd/time.rs index d06749b5757..b2d5ea64a98 100644 --- a/src/libstd/time.rs +++ b/src/libstd/time.rs @@ -66,14 +66,14 @@ fn tzset() { } type tm = { - tm_sec: i32, // seconds after the minute [0-60] - tm_min: i32, // minutes after the hour [0-59] - tm_hour: i32, // hours after midnight [0-23] - tm_mday: i32, // days of the month [1-31] - tm_mon: i32, // months since January [0-11] + tm_sec: i32, // seconds after the minute [0-60]/~ + tm_min: i32, // minutes after the hour [0-59]/~ + tm_hour: i32, // hours after midnight [0-23]/~ + tm_mday: i32, // days of the month [1-31]/~ + tm_mon: i32, // months since January [0-11]/~ tm_year: i32, // years since 1900 - tm_wday: i32, // days since Sunday [0-6] - tm_yday: i32, // days since January 1 [0-365] + tm_wday: i32, // days since Sunday [0-6]/~ + tm_yday: i32, // days since January 1 [0-365]/~ tm_isdst: i32, // Daylight Savings Time flag tm_gmtoff: i32, // offset from UTC in seconds tm_zone: str, // timezone abbreviation @@ -151,7 +151,7 @@ fn strptime(s: str, format: str) -> result { ret true; } - fn match_strs(s: str, pos: uint, strs: [(str, i32)]) + fn match_strs(s: str, pos: uint, strs: [(str, i32)]/~) -> option<(i32, uint)> { let mut i = 0u; let len = vec::len(strs); @@ -214,7 +214,7 @@ fn strptime(s: str, format: str) -> result { ("Thursday", 4_i32), ("Friday", 5_i32), ("Saturday", 6_i32) - ]) { + ]/~) { some(item) { let (v, pos) = item; tm.tm_wday = v; ok(pos) } none { err("Invalid day") } } @@ -228,7 +228,7 @@ fn strptime(s: str, format: str) -> result { ("Thu", 4_i32), ("Fri", 5_i32), ("Sat", 6_i32) - ]) { + ]/~) { some(item) { let (v, pos) = item; tm.tm_wday = v; ok(pos) } none { err("Invalid day") } } @@ -247,7 +247,7 @@ fn strptime(s: str, format: str) -> result { ("October", 9_i32), ("November", 10_i32), ("December", 11_i32) - ]) { + ]/~) { some(item) { let (v, pos) = item; tm.tm_mon = v; ok(pos) } none { err("Invalid month") } } @@ -266,7 +266,7 @@ fn strptime(s: str, format: str) -> result { ("Oct", 9_i32), ("Nov", 10_i32), ("Dec", 11_i32) - ]) { + ]/~) { some(item) { let (v, pos) = item; tm.tm_mon = v; ok(pos) } none { err("Invalid month") } } @@ -385,13 +385,13 @@ fn strptime(s: str, format: str) -> result { } 'n' { parse_char(s, pos, '\n') } 'P' { - alt match_strs(s, pos, [("am", 0_i32), ("pm", 12_i32)]) { + alt match_strs(s, pos, [("am", 0_i32), ("pm", 12_i32)]/~) { some(item) { let (v, pos) = item; tm.tm_hour += v; ok(pos) } none { err("Invalid hour") } } } 'p' { - alt match_strs(s, pos, [("AM", 0_i32), ("PM", 12_i32)]) { + alt match_strs(s, pos, [("AM", 0_i32), ("PM", 12_i32)]/~) { 
some(item) { let (v, pos) = item; tm.tm_hour += v; ok(pos) } none { err("Invalid hour") } } @@ -1010,7 +1010,7 @@ mod tests { "Thursday", "Friday", "Saturday" - ].iter { |day| assert test(day, "%A"); } + ]/~.iter { |day| assert test(day, "%A"); } [ "Sun", @@ -1020,7 +1020,7 @@ mod tests { "Thu", "Fri", "Sat" - ].iter { |day| assert test(day, "%a"); } + ]/~.iter { |day| assert test(day, "%a"); } [ "January", @@ -1035,7 +1035,7 @@ mod tests { "October", "November", "December" - ].iter { |day| assert test(day, "%B"); } + ]/~.iter { |day| assert test(day, "%B"); } [ "Jan", @@ -1050,7 +1050,7 @@ mod tests { "Oct", "Nov", "Dec" - ].iter { |day| assert test(day, "%b"); } + ]/~.iter { |day| assert test(day, "%b"); } assert test("19", "%C"); assert test("Fri Feb 13 23:31:30 2009", "%c"); diff --git a/src/libstd/timer.rs b/src/libstd/timer.rs index 3ee92e5f074..f6981ce39e0 100644 --- a/src/libstd/timer.rs +++ b/src/libstd/timer.rs @@ -167,7 +167,7 @@ mod test { [(1u, 20u), (10u, 10u), - (20u, 2u)] + (20u, 2u)]/~ }; diff --git a/src/libstd/uv_ll.rs b/src/libstd/uv_ll.rs index b7e1ee7d82c..8011d816fea 100644 --- a/src/libstd/uv_ll.rs +++ b/src/libstd/uv_ll.rs @@ -24,7 +24,7 @@ import libc::size_t; // libuv struct mappings type uv_ip4_addr = { - ip: [u8], + ip: [u8]/~, port: int }; type uv_ip6_addr = uv_ip4_addr; @@ -616,7 +616,7 @@ unsafe fn accept(server: *libc::c_void, client: *libc::c_void) } unsafe fn write(req: *uv_write_t, stream: *T, - buf_in: *[uv_buf_t], cb: *u8) -> libc::c_int { + buf_in: *[uv_buf_t]/~, cb: *u8) -> libc::c_int { let buf_ptr = vec::unsafe::to_ptr(*buf_in); let buf_cnt = vec::len(*buf_in) as i32; ret rustrt::rust_uv_write(req as *libc::c_void, @@ -678,7 +678,7 @@ unsafe fn buf_init(++input: *u8, len: uint) -> uv_buf_t { unsafe fn ip4_addr(ip: str, port: int) -> sockaddr_in { let mut addr_vec = str::bytes(ip); - addr_vec += [0u8]; // add null terminator + addr_vec += [0u8]/~; // add null terminator let addr_vec_ptr = vec::unsafe::to_ptr(addr_vec); let ip_back = str::from_bytes(addr_vec); log(debug, #fmt("vec val: '%s' length: %u", @@ -795,13 +795,13 @@ type uv_err_data = { mod test { enum tcp_read_data { tcp_read_eof, - tcp_read_more([u8]), + tcp_read_more([u8]/~), tcp_read_error } type request_wrapper = { write_req: *uv_write_t, - req_buf: *[uv_buf_t], + req_buf: *[uv_buf_t]/~, read_chan: *comm::chan }; @@ -917,7 +917,7 @@ mod test { log(debug, #fmt("req_msg ptr: %u", req_msg_ptr as uint)); let req_msg = [ buf_init(req_msg_ptr, vec::len(req_str_bytes)) - ]; + ]/~; // this is the enclosing record, we'll pass a ptr to // this to C.. 
let write_handle = write_t(); @@ -1115,7 +1115,7 @@ mod test { client: *uv_tcp_t, server: *uv_tcp_t, server_kill_msg: str, - server_resp_buf: *[uv_buf_t], + server_resp_buf: *[uv_buf_t]/~, server_chan: *comm::chan, server_write_req: *uv_write_t }; @@ -1164,7 +1164,7 @@ mod test { log(debug, #fmt("resp_msg ptr: %u", resp_msg_ptr as uint)); let resp_msg = [ buf_init(resp_msg_ptr, vec::len(resp_str_bytes)) - ]; + ]/~; let continue_async_handle = async_t(); let continue_async_handle_ptr = diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index d6d2d4f3165..936718dd0a7 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -41,9 +41,9 @@ type fn_ident = option; #[auto_serialize] type path = {span: span, global: bool, - idents: [ident], + idents: [ident]/~, rp: option<@region>, - types: [@ty]}; + types: [@ty]/~}; #[auto_serialize] type crate_num = int; @@ -66,7 +66,7 @@ enum ty_param_bound { } #[auto_serialize] -type ty_param = {ident: ident, id: node_id, bounds: @[ty_param_bound]}; +type ty_param = {ident: ident, id: node_id, bounds: @[ty_param_bound]/~}; #[auto_serialize] enum def { @@ -92,19 +92,19 @@ enum def { // The set of meta_items that define the compilation environment of the crate, // used to drive conditional compilation -type crate_cfg = [@meta_item]; +type crate_cfg = [@meta_item]/~; type crate = spanned; type crate_ = - {directives: [@crate_directive], + {directives: [@crate_directive]/~, module: _mod, - attrs: [attribute], + attrs: [attribute]/~, config: crate_cfg}; enum crate_directive_ { - cdir_src_mod(ident, [attribute]), - cdir_dir_mod(ident, [@crate_directive], [attribute]), + cdir_src_mod(ident, [attribute]/~), + cdir_dir_mod(ident, [@crate_directive]/~, [attribute]/~), // NB: cdir_view_item is *not* processed by the rest of the compiler, the // attached view_items are sunk into the crate's module during parsing, @@ -124,7 +124,7 @@ type meta_item = spanned; #[auto_serialize] enum meta_item_ { meta_word(ident), - meta_list(ident, [@meta_item]), + meta_list(ident, [@meta_item]/~), meta_name_value(ident, lit), } @@ -132,8 +132,11 @@ enum meta_item_ { type blk = spanned; #[auto_serialize] -type blk_ = {view_items: [@view_item], stmts: [@stmt], expr: option<@expr>, - id: node_id, rules: blk_check_mode}; +type blk_ = {view_items: [@view_item]/~, + stmts: [@stmt]/~, + expr: option<@expr>, + id: node_id, + rules: blk_check_mode}; #[auto_serialize] type pat = {id: node_id, node: pat_, span: span}; @@ -152,10 +155,10 @@ enum pat_ { // records this pattern's node_id in an auxiliary // set (of "pat_idents that refer to nullary enums") pat_ident(@path, option<@pat>), - pat_enum(@path, option<[@pat]>), // "none" means a * pattern where + pat_enum(@path, option<[@pat]/~>), // "none" means a * pattern where // we don't bind the fields to names - pat_rec([field_pat], bool), - pat_tup([@pat]), + pat_rec([field_pat]/~, bool), + pat_tup([@pat]/~), pat_box(@pat), pat_uniq(@pat), pat_lit(@expr), @@ -267,10 +270,10 @@ type local = spanned; type decl = spanned; #[auto_serialize] -enum decl_ { decl_local([@local]), decl_item(@item), } +enum decl_ { decl_local([@local]/~), decl_item(@item), } #[auto_serialize] -type arm = {pats: [@pat], guard: option<@expr>, body: blk}; +type arm = {pats: [@pat]/~, guard: option<@expr>, body: blk}; #[auto_serialize] type field_ = {mutbl: mutability, ident: ident, expr: @expr}; @@ -293,10 +296,10 @@ enum alt_mode { alt_check, alt_exhaustive, } #[auto_serialize] enum expr_ { expr_vstore(@expr, vstore), - expr_vec([@expr], mutability), - 
expr_rec([field], option<@expr>), - expr_call(@expr, [@expr], bool), // True iff last argument is a block - expr_tup([@expr]), + expr_vec([@expr]/~, mutability), + expr_rec([field]/~, option<@expr>), + expr_call(@expr, [@expr]/~, bool), // True iff last argument is a block + expr_tup([@expr]/~), expr_binary(binop, @expr, @expr), expr_unary(unop, @expr), expr_lit(@lit), @@ -307,7 +310,7 @@ enum expr_ { Same semantics as while(true) { body }, but typestate knows that the (implicit) condition is always true. */ expr_loop(blk), - expr_alt(@expr, [arm], alt_mode), + expr_alt(@expr, [arm]/~, alt_mode), expr_fn(proto, fn_decl, blk, capture_clause), expr_fn_block(fn_decl, blk, capture_clause), // Inner expr is always an expr_fn_block. We need the wrapping node to @@ -327,7 +330,7 @@ enum expr_ { expr_assign(@expr, @expr), expr_swap(@expr, @expr), expr_assign_op(binop, @expr, @expr), - expr_field(@expr, ident, [@ty]), + expr_field(@expr, ident, [@ty]/~), expr_index(@expr, @expr), expr_path(@path), expr_addr_of(mutability, @expr), @@ -359,7 +362,7 @@ type capture_item = @{ }; #[auto_serialize] -type capture_clause = @[capture_item]; +type capture_clause = @[capture_item]/~; /* // Says whether this is a block the user marked as @@ -373,7 +376,7 @@ enum blk_sort { #[auto_serialize] enum token_tree { /* for macro invocations; parsing is the macro's job */ - tt_delim([token_tree]), + tt_delim([token_tree]/~), tt_flat(span, token::token) } @@ -384,7 +387,7 @@ type matcher = spanned; enum matcher_ { mtc_tok(token::token), /* body, separator, zero ok? : */ - mtc_rep([matcher], option, bool), + mtc_rep([matcher]/~, option, bool), mtc_bb(ident, ident, uint) } @@ -438,8 +441,8 @@ type ty_field_ = {ident: ident, mt: mt}; type ty_field = spanned; #[auto_serialize] -type ty_method = {ident: ident, attrs: [attribute], - decl: fn_decl, tps: [ty_param], span: span}; +type ty_method = {ident: ident, attrs: [attribute]/~, + decl: fn_decl, tps: [ty_param]/~, span: span}; #[auto_serialize] enum int_ty { ty_i, ty_char, ty_i8, ty_i16, ty_i32, ty_i64, } @@ -478,11 +481,11 @@ enum ty_ { ty_vec(mt), ty_ptr(mt), ty_rptr(@region, mt), - ty_rec([ty_field]), + ty_rec([ty_field]/~), ty_fn(proto, fn_decl), - ty_tup([@ty]), + ty_tup([@ty]/~), ty_path(@path, node_id), - ty_constr(@ty, [@ty_constr]), + ty_constr(@ty, [@ty_constr]/~), ty_vstore(@ty, vstore), ty_mac(mac), // ty_infer means the type should be inferred instead of it having been @@ -522,7 +525,7 @@ type constr_arg = spanned; #[auto_serialize] type constr_general_ = - {path: @path, args: [@sp_constr_arg], id: ID}; + {path: @path, args: [@sp_constr_arg]/~, id: ID}; // In the front end, constraints have a node ID attached. // Typeck turns this to a def_id, using the output of resolve. 
@@ -549,11 +552,11 @@ type arg = {mode: mode, ty: @ty, ident: ident, id: node_id}; #[auto_serialize] type fn_decl = - {inputs: [arg], + {inputs: [arg]/~, output: @ty, purity: purity, cf: ret_style, - constraints: [@constr]}; + constraints: [@constr]/~}; #[auto_serialize] enum purity { @@ -571,14 +574,14 @@ enum ret_style { } #[auto_serialize] -type method = {ident: ident, attrs: [attribute], - tps: [ty_param], decl: fn_decl, body: blk, +type method = {ident: ident, attrs: [attribute]/~, + tps: [ty_param]/~, decl: fn_decl, body: blk, id: node_id, span: span, self_id: node_id, vis: visibility}; // always public, unless it's a // class method #[auto_serialize] -type _mod = {view_items: [@view_item], items: [@item]}; +type _mod = {view_items: [@view_item]/~, items: [@item]/~}; #[auto_serialize] enum native_abi { @@ -589,14 +592,14 @@ enum native_abi { #[auto_serialize] type native_mod = - {view_items: [@view_item], - items: [@native_item]}; + {view_items: [@view_item]/~, + items: [@native_item]/~}; #[auto_serialize] type variant_arg = {ty: @ty, id: node_id}; #[auto_serialize] -type variant_ = {name: ident, attrs: [attribute], args: [variant_arg], +type variant_ = {name: ident, attrs: [attribute]/~, args: [variant_arg]/~, id: node_id, disr_expr: option<@expr>, vis: visibility}; #[auto_serialize] @@ -625,18 +628,18 @@ enum view_path_ { view_path_glob(@path, node_id), // foo::bar::{a,b,c} - view_path_list(@path, [path_list_ident], node_id) + view_path_list(@path, [path_list_ident]/~, node_id) } #[auto_serialize] -type view_item = {node: view_item_, attrs: [attribute], +type view_item = {node: view_item_, attrs: [attribute]/~, vis: visibility, span: span}; #[auto_serialize] enum view_item_ { - view_item_use(ident, [@meta_item], node_id), - view_item_import([@view_path]), - view_item_export([@view_path]) + view_item_use(ident, [@meta_item]/~, node_id), + view_item_import([@view_path]/~), + view_item_export([@view_path]/~) } // Meta-data associated with an item @@ -663,7 +666,7 @@ type iface_ref = {path: @path, id: node_id}; enum visibility { public, private } #[auto_serialize] -type item = {ident: ident, attrs: [attribute], +type item = {ident: ident, attrs: [attribute]/~, id: node_id, node: item_, vis: visibility, span: span}; @@ -676,23 +679,23 @@ enum region_param { #[auto_serialize] enum item_ { item_const(@ty, @expr), - item_fn(fn_decl, [ty_param], blk), + item_fn(fn_decl, [ty_param]/~, blk), item_mod(_mod), item_native_mod(native_mod), - item_ty(@ty, [ty_param], region_param), - item_enum([variant], [ty_param], region_param), - item_class([ty_param], /* ty params for class */ - [@iface_ref], /* ifaces this class implements */ - [@class_member], /* methods, etc. */ + item_ty(@ty, [ty_param]/~, region_param), + item_enum([variant]/~, [ty_param]/~, region_param), + item_class([ty_param]/~, /* ty params for class */ + [@iface_ref]/~, /* ifaces this class implements */ + [@class_member]/~, /* methods, etc. 
*/ /* (not including ctor or dtor) */ class_ctor, /* dtor is optional */ option, region_param ), - item_iface([ty_param], region_param, [ty_method]), - item_impl([ty_param], region_param, option<@iface_ref> /* iface */, - @ty /* self */, [@method]), + item_iface([ty_param]/~, region_param, [ty_method]/~), + item_impl([ty_param]/~, region_param, option<@iface_ref> /* iface */, + @ty /* self */, [@method]/~), } #[auto_serialize] @@ -727,14 +730,14 @@ type class_dtor_ = {id: node_id, #[auto_serialize] type native_item = {ident: ident, - attrs: [attribute], + attrs: [attribute]/~, node: native_item_, id: node_id, span: span}; #[auto_serialize] enum native_item_ { - native_item_fn(fn_decl, [ty_param]), + native_item_fn(fn_decl, [ty_param]/~), } // The data we save and restore about an inlined item or method. This is not @@ -745,8 +748,8 @@ enum inlined_item { ii_item(@item), ii_method(def_id /* impl id */, @method), ii_native(@native_item), - ii_ctor(class_ctor, ident, [ty_param], def_id /* parent id */), - ii_dtor(class_dtor, ident, [ty_param], def_id /* parent id */) + ii_ctor(class_ctor, ident, [ty_param]/~, def_id /* parent id */), + ii_dtor(class_dtor, ident, [ty_param]/~, def_id /* parent id */) } // diff --git a/src/libsyntax/ast_map.rs b/src/libsyntax/ast_map.rs index 219769dd28c..96ecadc2be3 100644 --- a/src/libsyntax/ast_map.rs +++ b/src/libsyntax/ast_map.rs @@ -7,7 +7,7 @@ import ast_util::inlined_item_methods; import diagnostic::span_handler; enum path_elt { path_mod(ident), path_name(ident) } -type path = [path_elt]; +type path = [path_elt]/~; /* FIXMEs that say "bad" are as per #2543 */ fn path_to_str_with_sep(p: path, sep: str) -> str { @@ -45,9 +45,9 @@ enum ast_node { node_local(uint), // Constructor for a class // def_id is parent id - node_ctor(ident, [ty_param], @class_ctor, def_id, @path), + node_ctor(ident, [ty_param]/~, @class_ctor, def_id, @path), // Destructor for a class - node_dtor([ty_param], @class_dtor, def_id, @path), + node_dtor([ty_param]/~, @class_dtor, def_id, @path), node_block(blk), } @@ -57,7 +57,7 @@ type ctx = {map: map, mut path: path, type vt = visit::vt; fn extend(cx: ctx, +elt: ident) -> @path { - @(cx.path + [path_name(elt)]) + @(cx.path + [path_name(elt)]/~) } fn mk_ast_map_visitor() -> vt { @@ -75,7 +75,7 @@ fn mk_ast_map_visitor() -> vt { fn map_crate(diag: span_handler, c: crate) -> map { let cx = {map: std::map::int_hash(), - mut path: [], + mut path: []/~, mut local_id: 0u, diag: diag}; visit::visit_crate(c, cx, mk_ast_map_visitor()); @@ -229,9 +229,9 @@ fn map_item(i: @item, cx: ctx, v: vt) { } alt i.node { item_mod(_) | item_native_mod(_) { - cx.path += [path_mod(i.ident)]; + cx.path += [path_mod(i.ident)]/~; } - _ { cx.path += [path_name(i.ident)]; } + _ { cx.path += [path_name(i.ident)]/~; } } visit::visit_item(i, cx, v); vec::pop(cx.path); diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs index f402e1f6ab5..7c0a7158c17 100644 --- a/src/libsyntax/ast_util.rs +++ b/src/libsyntax/ast_util.rs @@ -23,7 +23,7 @@ pure fn dummy_sp() -> span { ret mk_sp(0u, 0u); } pure fn path_name(p: @path) -> str { path_name_i(p.idents) } -pure fn path_name_i(idents: [ident]) -> str { +pure fn path_name_i(idents: [ident]/~) -> str { // FIXME: Bad copies (#2543 -- same for everything else that says "bad") str::connect(idents.map({|i|*i}), "::") } @@ -246,18 +246,19 @@ fn new_def_hash() -> std::map::hashmap { } fn block_from_expr(e: @expr) -> blk { - let blk_ = default_block([], option::some::<@expr>(e), e.id); + let blk_ = default_block([]/~, 
option::some::<@expr>(e), e.id); ret {node: blk_, span: e.span}; } -fn default_block(+stmts1: [@stmt], expr1: option<@expr>, id1: node_id) -> +fn default_block(+stmts1: [@stmt]/~, expr1: option<@expr>, id1: node_id) -> blk_ { - {view_items: [], stmts: stmts1, expr: expr1, id: id1, rules: default_blk} + {view_items: []/~, stmts: stmts1, + expr: expr1, id: id1, rules: default_blk} } fn ident_to_path(s: span, +i: ident) -> @path { - @{span: s, global: false, idents: [i], - rp: none, types: []} + @{span: s, global: false, idents: [i]/~, + rp: none, types: []/~} } pure fn is_unguarded(&&a: arm) -> bool { @@ -267,7 +268,7 @@ pure fn is_unguarded(&&a: arm) -> bool { } } -pure fn unguarded_pat(a: arm) -> option<[@pat]> { +pure fn unguarded_pat(a: arm) -> option<[@pat]/~> { if is_unguarded(a) { some(/* FIXME (#2543) */ copy a.pats) } else { none } } @@ -286,14 +287,14 @@ pure fn class_item_ident(ci: @class_member) -> ident { type ivar = {ident: ident, ty: @ty, cm: class_mutability, id: node_id, vis: visibility}; -fn public_methods(ms: [@method]) -> [@method] { +fn public_methods(ms: [@method]/~) -> [@method]/~ { vec::filter(ms, {|m| alt m.vis { public { true } _ { false }}}) } -fn split_class_items(cs: [@class_member]) -> ([ivar], [@method]) { - let mut vs = [], ms = []; +fn split_class_items(cs: [@class_member]/~) -> ([ivar]/~, [@method]/~) { + let mut vs = []/~, ms = []/~; for cs.each {|c| alt c.node { instance_var(i, t, cm, id, vis) { @@ -301,9 +302,9 @@ fn split_class_items(cs: [@class_member]) -> ([ivar], [@method]) { ty: t, cm: cm, id: id, - vis: vis}]; + vis: vis}]/~; } - class_method(m) { ms += [m]; } + class_method(m) { ms += [m]/~; } } }; (vs, ms) @@ -383,8 +384,8 @@ fn dtor_dec() -> fn_decl { let nil_t = @{id: 0, node: ty_nil, span: dummy_sp()}; // dtor has one argument, of type () {inputs: [{mode: ast::expl(ast::by_ref), - ty: nil_t, ident: @"_", id: 0}], - output: nil_t, purity: impure_fn, cf: return_val, constraints: []} + ty: nil_t, ident: @"_", id: 0}]/~, + output: nil_t, purity: impure_fn, cf: return_val, constraints: []/~} } // ______________________________________________________________________ @@ -471,7 +472,7 @@ fn id_visitor(vfn: fn@(node_id)) -> visit::vt<()> { } }, - visit_ty_params: fn@(ps: [ty_param]) { + visit_ty_params: fn@(ps: [ty_param]/~) { vec::iter(ps) {|p| vfn(p.id) } }, diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index fb9560065a8..af808222f11 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -56,7 +56,7 @@ fn mk_name_value_item(+name: ast::ident, +value: ast::lit) ret @dummy_spanned(ast::meta_name_value(name, value)); } -fn mk_list_item(+name: ast::ident, +items: [@ast::meta_item]) -> +fn mk_list_item(+name: ast::ident, +items: [@ast::meta_item]/~) -> @ast::meta_item { ret @dummy_spanned(ast::meta_list(name, items)); } @@ -75,9 +75,9 @@ fn mk_attr(item: @ast::meta_item) -> ast::attribute { fn attr_meta(attr: ast::attribute) -> @ast::meta_item { @attr.node.value } // Get the meta_items from inside a vector of attributes -fn attr_metas(attrs: [ast::attribute]) -> [@ast::meta_item] { - let mut mitems = []; - for attrs.each {|a| mitems += [attr_meta(a)]; } +fn attr_metas(attrs: [ast::attribute]/~) -> [@ast::meta_item]/~ { + let mut mitems = []/~; + for attrs.each {|a| mitems += [attr_meta(a)]/~; } ret mitems; } @@ -118,7 +118,7 @@ fn get_meta_item_value_str(meta: @ast::meta_item) -> option<@str> { } #[doc = "Gets a list of inner meta items from a list meta_item type"] -fn get_meta_item_list(meta: @ast::meta_item) -> 
option<[@ast::meta_item]> { +fn get_meta_item_list(meta: @ast::meta_item) -> option<[@ast::meta_item]/~> { alt meta.node { ast::meta_list(_, l) { option::some(/* FIXME (#2543) */ copy l) } _ { option::none } @@ -147,8 +147,8 @@ fn get_name_value_str_pair( #[doc = " Search a list of attributes and return only those with a specific name "] -fn find_attrs_by_name(attrs: [ast::attribute], +name: str) -> - [ast::attribute] { +fn find_attrs_by_name(attrs: [ast::attribute]/~, +name: str) -> + [ast::attribute]/~ { let filter = ( fn@(a: ast::attribute) -> option { if *get_attr_name(a) == name { @@ -162,8 +162,8 @@ fn find_attrs_by_name(attrs: [ast::attribute], +name: str) -> #[doc = " Searcha list of meta items and return only those with a specific name "] -fn find_meta_items_by_name(metas: [@ast::meta_item], +name: str) -> - [@ast::meta_item] { +fn find_meta_items_by_name(metas: [@ast::meta_item]/~, +name: str) -> + [@ast::meta_item]/~ { let filter = fn@(&&m: @ast::meta_item) -> option<@ast::meta_item> { if *get_meta_item_name(m) == name { option::some(m) @@ -176,7 +176,7 @@ fn find_meta_items_by_name(metas: [@ast::meta_item], +name: str) -> Returns true if a list of meta items contains another meta item. The comparison is performed structurally. "] -fn contains(haystack: [@ast::meta_item], needle: @ast::meta_item) -> bool { +fn contains(haystack: [@ast::meta_item]/~, needle: @ast::meta_item) -> bool { #debug("looking for %s", print::pprust::meta_item_to_str(*needle)); for haystack.each {|item| @@ -201,7 +201,7 @@ fn eq(a: @ast::meta_item, b: @ast::meta_item) -> bool { } ast::meta_list(na, la) { - // [Fixme-sorting] + // [Fixme-sorting]/~ // FIXME (#607): Needs implementing // This involves probably sorting the list by name and // meta_item variant @@ -210,16 +210,16 @@ fn eq(a: @ast::meta_item, b: @ast::meta_item) -> bool { } } -fn contains_name(metas: [@ast::meta_item], +name: str) -> bool { +fn contains_name(metas: [@ast::meta_item]/~, +name: str) -> bool { let matches = find_meta_items_by_name(metas, name); ret vec::len(matches) > 0u; } -fn attrs_contains_name(attrs: [ast::attribute], +name: str) -> bool { +fn attrs_contains_name(attrs: [ast::attribute]/~, +name: str) -> bool { vec::is_not_empty(find_attrs_by_name(attrs, name)) } -fn first_attr_value_str_by_name(attrs: [ast::attribute], +name: str) +fn first_attr_value_str_by_name(attrs: [ast::attribute]/~, +name: str) -> option<@str> { let mattrs = find_attrs_by_name(attrs, name); if vec::len(mattrs) > 0u { @@ -229,7 +229,7 @@ fn first_attr_value_str_by_name(attrs: [ast::attribute], +name: str) } fn last_meta_item_by_name( - items: [@ast::meta_item], + items: [@ast::meta_item]/~, +name: str ) -> option<@ast::meta_item> { let items = attr::find_meta_items_by_name(items, name); @@ -237,7 +237,7 @@ fn last_meta_item_by_name( } fn last_meta_item_value_str_by_name( - items: [@ast::meta_item], + items: [@ast::meta_item]/~, +name: str ) -> option<@str> { alt last_meta_item_by_name(items, name) { @@ -252,9 +252,9 @@ fn last_meta_item_value_str_by_name( } fn last_meta_item_list_by_name( - items: [@ast::meta_item], + items: [@ast::meta_item]/~, +name: str -) -> option<[@ast::meta_item]> { +) -> option<[@ast::meta_item]/~> { alt last_meta_item_by_name(items, name) { some(item) { attr::get_meta_item_list(item) @@ -268,7 +268,7 @@ fn last_meta_item_list_by_name( // FIXME (#607): This needs to sort by meta_item variant in addition to // the item name (See [Fixme-sorting]) -fn sort_meta_items(+items: [@ast::meta_item]) -> [@ast::meta_item] { +fn 
sort_meta_items(+items: [@ast::meta_item]/~) -> [@ast::meta_item]/~ { fn lteq(&&ma: @ast::meta_item, &&mb: @ast::meta_item) -> bool { fn key(m: @ast::meta_item) -> ast::ident { alt m.node { @@ -281,13 +281,13 @@ fn sort_meta_items(+items: [@ast::meta_item]) -> [@ast::meta_item] { } // This is sort of stupid here, converting to a vec of mutables and back - let v: [mut @ast::meta_item] = vec::to_mut(items); + let v: [mut @ast::meta_item]/~ = vec::to_mut(items); std::sort::quick_sort(lteq, v); ret vec::from_mut(v); } -fn remove_meta_items_by_name(items: [@ast::meta_item], name: ast::ident) -> - [@ast::meta_item] { +fn remove_meta_items_by_name(items: [@ast::meta_item]/~, name: ast::ident) -> + [@ast::meta_item]/~ { ret vec::filter_map(items, { |item| @@ -299,11 +299,11 @@ fn remove_meta_items_by_name(items: [@ast::meta_item], name: ast::ident) -> }); } -fn find_linkage_attrs(attrs: [ast::attribute]) -> [ast::attribute] { - let mut found = []; +fn find_linkage_attrs(attrs: [ast::attribute]/~) -> [ast::attribute]/~ { + let mut found = []/~; for find_attrs_by_name(attrs, "link").each {|attr| alt attr.node.value.node { - ast::meta_list(_, _) { found += [attr] } + ast::meta_list(_, _) { found += [attr]/~ } _ { #debug("ignoring link attribute that has incorrect type"); } } } @@ -314,7 +314,7 @@ fn find_linkage_attrs(attrs: [ast::attribute]) -> [ast::attribute] { From a list of crate attributes get only the meta_items that impact crate linkage "] -fn find_linkage_metas(attrs: [ast::attribute]) -> [@ast::meta_item] { +fn find_linkage_metas(attrs: [ast::attribute]/~) -> [@ast::meta_item]/~ { find_linkage_attrs(attrs).flat_map {|attr| alt check attr.node.value.node { ast::meta_list(_, items) { /* FIXME (#2543) */ copy items } @@ -322,7 +322,7 @@ fn find_linkage_metas(attrs: [ast::attribute]) -> [@ast::meta_item] { } } -fn native_abi(attrs: [ast::attribute]) -> either { +fn native_abi(attrs: [ast::attribute]/~) -> either { ret alt attr::first_attr_value_str_by_name(attrs, "abi") { option::none { either::right(ast::native_abi_cdecl) @@ -349,7 +349,7 @@ enum inline_attr { } #[doc = "True if something like #[inline] is found in the list of attrs."] -fn find_inline_attr(attrs: [ast::attribute]) -> inline_attr { +fn find_inline_attr(attrs: [ast::attribute]/~) -> inline_attr { // TODO---validate the usage of #[inline] and #[inline(always)] vec::foldl(ia_none, attrs) {|ia,attr| alt attr.node.value.node { @@ -368,7 +368,7 @@ fn find_inline_attr(attrs: [ast::attribute]) -> inline_attr { fn require_unique_names(diagnostic: span_handler, - metas: [@ast::meta_item]) { + metas: [@ast::meta_item]/~) { let map = map::str_hash(); for metas.each {|meta| let name = get_meta_item_name(meta); diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index 49560fb5bbd..83085c2cc0f 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -43,7 +43,7 @@ enum file_substr { type filemap = @{name: filename, substr: file_substr, src: @str, - start_pos: file_pos, mut lines: [file_pos]}; + start_pos: file_pos, mut lines: [file_pos]/~}; type codemap = @{files: dvec}; @@ -57,7 +57,7 @@ fn new_filemap_w_substr(+filename: filename, +substr: file_substr, -> filemap { ret @{name: filename, substr: substr, src: src, start_pos: {ch: start_pos_ch, byte: start_pos_byte}, - mut lines: [{ch: start_pos_ch, byte: start_pos_byte}]}; + mut lines: [{ch: start_pos_ch, byte: start_pos_byte}]/~}; } fn new_filemap(+filename: filename, src: @str, @@ -74,7 +74,7 @@ fn mk_substr_filename(cm: codemap, sp: span) -> str } fn 
next_line(file: filemap, chpos: uint, byte_pos: uint) { - file.lines += [{ch: chpos, byte: byte_pos + file.start_pos.byte}]; + file.lines += [{ch: chpos, byte: byte_pos + file.start_pos.byte}]/~; } type lookup_fn = pure fn(file_pos) -> uint; @@ -174,7 +174,7 @@ fn span_to_str(sp: span, cm: codemap) -> str { lo.line, lo.col, hi.line, hi.col) } -type file_lines = {file: filemap, lines: [uint]}; +type file_lines = {file: filemap, lines: [uint]/~}; fn span_to_filename(sp: span, cm: codemap::codemap) -> filename { let lo = lookup_char_pos(cm, sp.lo); @@ -184,8 +184,8 @@ fn span_to_filename(sp: span, cm: codemap::codemap) -> filename { fn span_to_lines(sp: span, cm: codemap::codemap) -> @file_lines { let lo = lookup_char_pos(cm, sp.lo); let hi = lookup_char_pos(cm, sp.hi); - let mut lines = []; - for uint::range(lo.line - 1u, hi.line as uint) {|i| lines += [i]; }; + let mut lines = []/~; + for uint::range(lo.line - 1u, hi.line as uint) {|i| lines += [i]/~; }; ret @{file: lo.file, lines: lines}; } diff --git a/src/libsyntax/ext/auto_serialize.rs b/src/libsyntax/ext/auto_serialize.rs index fe9f59d5538..f3c356923f2 100644 --- a/src/libsyntax/ext/auto_serialize.rs +++ b/src/libsyntax/ext/auto_serialize.rs @@ -84,13 +84,13 @@ mod syntax { export parse; } -type ser_tps_map = map::hashmap [@ast::stmt]>; +type ser_tps_map = map::hashmap [@ast::stmt]/~>; type deser_tps_map = map::hashmap @ast::expr>; fn expand(cx: ext_ctxt, span: span, _mitem: ast::meta_item, - in_items: [@ast::item]) -> [@ast::item] { + in_items: [@ast::item]/~) -> [@ast::item]/~ { fn not_auto_serialize(a: ast::attribute) -> bool { attr::get_attr_name(a) != @"auto_serialize" } @@ -103,11 +103,11 @@ fn expand(cx: ext_ctxt, vec::flat_map(in_items) {|in_item| alt in_item.node { ast::item_ty(ty, tps, _) { - [filter_attrs(in_item)] + ty_fns(cx, in_item.ident, ty, tps) + [filter_attrs(in_item)]/~ + ty_fns(cx, in_item.ident, ty, tps) } ast::item_enum(variants, tps, _) { - [filter_attrs(in_item)] + enum_fns(cx, in_item.ident, + [filter_attrs(in_item)]/~ + enum_fns(cx, in_item.ident, in_item.span, variants, tps) } @@ -115,7 +115,7 @@ fn expand(cx: ext_ctxt, cx.span_err(span, "#[auto_serialize] can only be \ applied to type and enum \ definitions"); - [in_item] + [in_item]/~ } } } @@ -126,26 +126,27 @@ impl helpers for ext_ctxt { helper_name: str) -> @ast::path { let head = vec::init(base_path.idents); let tail = vec::last(base_path.idents); - self.path(base_path.span, head + [@(helper_name + "_" + *tail)]) + self.path(base_path.span, head + [@(helper_name + "_" + *tail)]/~) } - fn path(span: span, strs: [ast::ident]) -> @ast::path { - @{span: span, global: false, idents: strs, rp: none, types: []} + fn path(span: span, strs: [ast::ident]/~) -> @ast::path { + @{span: span, global: false, idents: strs, rp: none, types: []/~} } - fn path_tps(span: span, strs: [ast::ident], - tps: [@ast::ty]) -> @ast::path { + fn path_tps(span: span, strs: [ast::ident]/~, + tps: [@ast::ty]/~) -> @ast::path { @{span: span, global: false, idents: strs, rp: none, types: tps} } - fn ty_path(span: span, strs: [ast::ident], tps: [@ast::ty]) -> @ast::ty { + fn ty_path(span: span, strs: [ast::ident]/~, + tps: [@ast::ty]/~) -> @ast::ty { @{id: self.next_id(), node: ast::ty_path(self.path_tps(span, strs, tps), self.next_id()), span: span} } fn ty_fn(span: span, - -input_tys: [@ast::ty], + -input_tys: [@ast::ty]/~, -output: @ast::ty) -> @ast::ty { let args = vec::map(input_tys) {|ty| {mode: ast::expl(ast::by_ref), @@ -159,7 +160,7 @@ impl helpers for ext_ctxt { output: output, 
purity: ast::impure_fn, cf: ast::return_val, - constraints: []}), + constraints: []/~}), span: span} } @@ -172,11 +173,11 @@ impl helpers for ext_ctxt { } fn var_ref(span: span, name: ast::ident) -> @ast::expr { - self.expr(span, ast::expr_path(self.path(span, [name]))) + self.expr(span, ast::expr_path(self.path(span, [name]/~))) } - fn blk(span: span, stmts: [@ast::stmt]) -> ast::blk { - {node: {view_items: [], + fn blk(span: span, stmts: [@ast::stmt]/~) -> ast::blk { + {node: {view_items: []/~, stmts: stmts, expr: none, id: self.next_id(), @@ -185,8 +186,8 @@ impl helpers for ext_ctxt { } fn expr_blk(expr: @ast::expr) -> ast::blk { - {node: {view_items: [], - stmts: [], + {node: {view_items: []/~, + stmts: []/~, expr: some(expr), id: self.next_id(), rules: ast::default_blk}, @@ -194,8 +195,8 @@ impl helpers for ext_ctxt { } fn binder_pat(span: span, nm: ast::ident) -> @ast::pat { - let path = @{span: span, global: false, idents: [nm], - rp: none, types: []}; + let path = @{span: span, global: false, idents: [nm]/~, + rp: none, types: []/~}; @{id: self.next_id(), node: ast::pat_ident(path, none), span: span} @@ -206,7 +207,8 @@ impl helpers for ext_ctxt { span: expr.span} } - fn alt_stmt(arms: [ast::arm], span: span, -v: @ast::expr) -> @ast::stmt { + fn alt_stmt(arms: [ast::arm]/~, + span: span, -v: @ast::expr) -> @ast::stmt { self.stmt( self.expr( span, @@ -277,7 +279,7 @@ impl helpers for ext_ctxt { fn ser_path(cx: ext_ctxt, tps: ser_tps_map, path: @ast::path, -s: @ast::expr, -v: @ast::expr) - -> [@ast::stmt] { + -> [@ast::stmt]/~ { let ext_cx = cx; // required for #ast{} // We want to take a path like a::b::c<...> and generate a call @@ -299,15 +301,15 @@ fn ser_path(cx: ext_ctxt, tps: ser_tps_map, path: @ast::path, [cx.stmt( cx.expr( path.span, - ast::expr_call(callee, [s, v] + ty_args, false)))] + ast::expr_call(callee, [s, v]/~ + ty_args, false)))]/~ } fn ser_variant(cx: ext_ctxt, tps: ser_tps_map, - tys: [@ast::ty], + tys: [@ast::ty]/~, span: span, -s: @ast::expr, - pfn: fn([@ast::pat]) -> ast::pat_, + pfn: fn([@ast::pat]/~) -> ast::pat_, bodyfn: fn(-@ast::expr, ast::blk) -> @ast::expr, argfn: fn(-@ast::expr, uint, ast::blk) -> @ast::expr) -> ast::arm { @@ -326,9 +328,9 @@ fn ser_variant(cx: ext_ctxt, }; let body_blk = cx.blk(span, stmts); - let body = cx.blk(span, [cx.stmt(bodyfn(s, body_blk))]); + let body = cx.blk(span, [cx.stmt(bodyfn(s, body_blk))]/~); - {pats: [pat], guard: none, body: body} + {pats: [pat]/~, guard: none, body: body} } fn ser_lambda(cx: ext_ctxt, tps: ser_tps_map, ty: @ast::ty, @@ -338,34 +340,34 @@ fn ser_lambda(cx: ext_ctxt, tps: ser_tps_map, ty: @ast::ty, fn ser_ty(cx: ext_ctxt, tps: ser_tps_map, ty: @ast::ty, -s: @ast::expr, -v: @ast::expr) - -> [@ast::stmt] { + -> [@ast::stmt]/~ { let ext_cx = cx; // required for #ast{} alt ty.node { ast::ty_nil { - [#ast[stmt]{$(s).emit_nil()}] + [#ast[stmt]{$(s).emit_nil()}]/~ } ast::ty_bot { cx.span_err( ty.span, #fmt["Cannot serialize bottom type"]); - [] + []/~ } ast::ty_box(mt) { let l = ser_lambda(cx, tps, mt.ty, cx.clone(s), #ast{ *$(v) }); - [#ast(stmt){$(s).emit_box($(l));}] + [#ast(stmt){$(s).emit_box($(l));}]/~ } ast::ty_uniq(mt) { let l = ser_lambda(cx, tps, mt.ty, cx.clone(s), #ast{ *$(v) }); - [#ast(stmt){$(s).emit_uniq($(l));}] + [#ast(stmt){$(s).emit_uniq($(l));}]/~ } ast::ty_ptr(_) | ast::ty_rptr(_, _) { cx.span_err(ty.span, "cannot serialize pointer types"); - [] + []/~ } ast::ty_rec(flds) { @@ -374,7 +376,7 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map, let vf = cx.expr(fld.span, 
ast::expr_field(cx.clone(v), fld.node.ident, - [])); + []/~)); let s = cx.clone(s); let f = cx.lit_str(fld.span, fld.node.ident); let i = cx.lit_uint(fld.span, fidx); @@ -382,12 +384,12 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map, #ast(stmt){$(s).emit_rec_field($(f), $(i), $(l));} }; let fld_lambda = cx.lambda(cx.blk(ty.span, fld_stmts)); - [#ast(stmt){$(s).emit_rec($(fld_lambda));}] + [#ast(stmt){$(s).emit_rec($(fld_lambda));}]/~ } ast::ty_fn(_, _) { cx.span_err(ty.span, "cannot serialize function types"); - [] + []/~ } ast::ty_tup(tys) { @@ -420,8 +422,8 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map, let body = cx.lambda(blk); #ast{ $(s).emit_tup_elt($(idx), $(body)) } }) - ]; - [cx.alt_stmt(arms, ty.span, v)] + ]/~; + [cx.alt_stmt(arms, ty.span, v)]/~ } ast::ty_path(path, _) { @@ -444,12 +446,12 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map, ast::ty_mac(_) { cx.span_err(ty.span, "cannot serialize macro types"); - [] + []/~ } ast::ty_infer { cx.span_err(ty.span, "cannot serialize inferred types"); - [] + []/~ } ast::ty_vstore(@{node: ast::ty_vec(mt),_}, ast::vstore_uniq) | @@ -467,7 +469,7 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map, [#ast(stmt){ std::serialization::emit_from_vec($(s), $(v), {|__e| $(ser_e) }) - }] + }]/~ } ast::ty_vstore(_, _) { @@ -477,20 +479,21 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map, } } -fn mk_ser_fn(cx: ext_ctxt, span: span, name: ast::ident, tps: [ast::ty_param], +fn mk_ser_fn(cx: ext_ctxt, span: span, name: ast::ident, + tps: [ast::ty_param]/~, f: fn(ext_ctxt, ser_tps_map, - -@ast::expr, -@ast::expr) -> [@ast::stmt]) + -@ast::expr, -@ast::expr) -> [@ast::stmt]/~) -> @ast::item { let ext_cx = cx; // required for #ast - let tp_types = vec::map(tps, {|tp| cx.ty_path(span, [tp.ident], [])}); - let v_ty = cx.ty_path(span, [name], tp_types); + let tp_types = vec::map(tps, {|tp| cx.ty_path(span, [tp.ident]/~, []/~)}); + let v_ty = cx.ty_path(span, [name]/~, tp_types); let tp_inputs = vec::map(tps, {|tp| {mode: ast::expl(ast::by_ref), ty: cx.ty_fn(span, - [cx.ty_path(span, [tp.ident], [])], + [cx.ty_path(span, [tp.ident]/~, []/~)]/~, cx.ty_nil(span)), ident: @("__s" + *tp.ident), id: cx.next_id()}}); @@ -498,15 +501,15 @@ fn mk_ser_fn(cx: ext_ctxt, span: span, name: ast::ident, tps: [ast::ty_param], #debug["tp_inputs = %?", tp_inputs]; - let ser_inputs: [ast::arg] = + let ser_inputs: [ast::arg]/~ = [{mode: ast::expl(ast::by_ref), - ty: cx.ty_path(span, [@"__S"], []), + ty: cx.ty_path(span, [@"__S"]/~, []/~), ident: @"__s", id: cx.next_id()}, {mode: ast::expl(ast::by_ref), ty: v_ty, ident: @"__v", - id: cx.next_id()}] + id: cx.next_id()}]/~ + tp_inputs; let tps_map = map::str_hash(); @@ -514,22 +517,23 @@ fn mk_ser_fn(cx: ext_ctxt, span: span, name: ast::ident, tps: [ast::ty_param], let arg_ident = arg.ident; tps_map.insert( *tp.ident, - fn@(v: @ast::expr) -> [@ast::stmt] { + fn@(v: @ast::expr) -> [@ast::stmt]/~ { let f = cx.var_ref(span, arg_ident); #debug["serializing type arg %s", *arg_ident]; - [#ast(stmt){$(f)($(v));}] + [#ast(stmt){$(f)($(v));}]/~ }); } let ser_bnds = @[ ast::bound_iface(cx.ty_path(span, - [@"std", @"serialization", @"serializer"], - []))]; + [@"std", @"serialization", + @"serializer"]/~, + []/~))]/~; - let ser_tps: [ast::ty_param] = + let ser_tps: [ast::ty_param]/~ = [{ident: @"__S", id: cx.next_id(), - bounds: ser_bnds}] + + bounds: ser_bnds}]/~ + vec::map(tps) {|tp| cx.clone_ty_param(tp) }; let ser_output: @ast::ty = @{id: cx.next_id(), @@ -540,13 +544,13 @@ fn mk_ser_fn(cx: ext_ctxt, span: span, name: ast::ident, tps: [ast::ty_param], f(cx, 
tps_map, #ast{ __s }, #ast{ __v })); @{ident: @("serialize_" + *name), - attrs: [], + attrs: []/~, id: cx.next_id(), node: ast::item_fn({inputs: ser_inputs, output: ser_output, purity: ast::impure_fn, cf: ast::return_val, - constraints: []}, + constraints: []/~}, ser_tps, ser_blk), vis: ast::public, @@ -571,7 +575,7 @@ fn deser_path(cx: ext_ctxt, tps: deser_tps_map, path: @ast::path, cx.lambda(cx.expr_blk(dv_expr)) }; - cx.expr(path.span, ast::expr_call(callee, [d] + ty_args, false)) + cx.expr(path.span, ast::expr_call(callee, [d]/~ + ty_args, false)) } fn deser_lambda(cx: ext_ctxt, tps: deser_tps_map, ty: @ast::ty, @@ -688,30 +692,30 @@ fn deser_ty(cx: ext_ctxt, tps: deser_tps_map, } fn mk_deser_fn(cx: ext_ctxt, span: span, - name: ast::ident, tps: [ast::ty_param], + name: ast::ident, tps: [ast::ty_param]/~, f: fn(ext_ctxt, deser_tps_map, -@ast::expr) -> @ast::expr) -> @ast::item { let ext_cx = cx; // required for #ast - let tp_types = vec::map(tps, {|tp| cx.ty_path(span, [tp.ident], [])}); - let v_ty = cx.ty_path(span, [name], tp_types); + let tp_types = vec::map(tps, {|tp| cx.ty_path(span, [tp.ident]/~, []/~)}); + let v_ty = cx.ty_path(span, [name]/~, tp_types); let tp_inputs = vec::map(tps, {|tp| {mode: ast::expl(ast::by_ref), ty: cx.ty_fn(span, - [], - cx.ty_path(span, [tp.ident], [])), + []/~, + cx.ty_path(span, [tp.ident]/~, []/~)), ident: @("__d" + *tp.ident), id: cx.next_id()}}); #debug["tp_inputs = %?", tp_inputs]; - let deser_inputs: [ast::arg] = + let deser_inputs: [ast::arg]/~ = [{mode: ast::expl(ast::by_ref), - ty: cx.ty_path(span, [@"__D"], []), + ty: cx.ty_path(span, [@"__D"]/~, []/~), ident: @"__d", - id: cx.next_id()}] + id: cx.next_id()}]/~ + tp_inputs; let tps_map = map::str_hash(); @@ -728,46 +732,47 @@ fn mk_deser_fn(cx: ext_ctxt, span: span, let deser_bnds = @[ ast::bound_iface(cx.ty_path( span, - [@"std", @"serialization", @"deserializer"], - []))]; + [@"std", @"serialization", @"deserializer"]/~, + []/~))]/~; - let deser_tps: [ast::ty_param] = + let deser_tps: [ast::ty_param]/~ = [{ident: @"__D", id: cx.next_id(), - bounds: deser_bnds}] + vec::map(tps) {|tp| + bounds: deser_bnds}]/~ + vec::map(tps) {|tp| let cloned = cx.clone_ty_param(tp); - {bounds: @(*cloned.bounds + [ast::bound_copy]) with cloned} + {bounds: @(*cloned.bounds + [ast::bound_copy]/~) with cloned} }; let deser_blk = cx.expr_blk(f(cx, tps_map, #ast(expr){__d})); @{ident: @("deserialize_" + *name), - attrs: [], + attrs: []/~, id: cx.next_id(), node: ast::item_fn({inputs: deser_inputs, output: v_ty, purity: ast::impure_fn, cf: ast::return_val, - constraints: []}, + constraints: []/~}, deser_tps, deser_blk), vis: ast::public, span: span} } -fn ty_fns(cx: ext_ctxt, name: ast::ident, ty: @ast::ty, tps: [ast::ty_param]) - -> [@ast::item] { +fn ty_fns(cx: ext_ctxt, name: ast::ident, + ty: @ast::ty, tps: [ast::ty_param]/~) + -> [@ast::item]/~ { let span = ty.span; [ mk_ser_fn(cx, span, name, tps, {|a,b,c,d|ser_ty(a, b, ty, c, d)}), mk_deser_fn(cx, span, name, tps, {|a,b,c|deser_ty(a, b, ty, c)}) - ] + ]/~ } fn ser_enum(cx: ext_ctxt, tps: ser_tps_map, e_name: ast::ident, - e_span: span, variants: [ast::variant], - -s: @ast::expr, -v: @ast::expr) -> [@ast::stmt] { + e_span: span, variants: [ast::variant]/~, + -s: @ast::expr, -v: @ast::expr) -> [@ast::stmt]/~ { let ext_cx = cx; let arms = vec::from_fn(vec::len(variants)) {|vidx| let variant = variants[vidx]; @@ -781,9 +786,9 @@ fn ser_enum(cx: ext_ctxt, tps: ser_tps_map, e_name: ast::ident, // Generate pattern var(v1, v2, v3) {|pats| if vec::is_empty(pats) { - 
ast::pat_ident(cx.path(v_span, [v_name]), none) + ast::pat_ident(cx.path(v_span, [v_name]/~), none) } else { - ast::pat_enum(cx.path(v_span, [v_name]), some(pats)) + ast::pat_enum(cx.path(v_span, [v_name]/~), some(pats)) } }, @@ -809,16 +814,16 @@ fn ser_enum(cx: ext_ctxt, tps: ser_tps_map, e_name: ast::ident, } }) }; - let lam = cx.lambda(cx.blk(e_span, [cx.alt_stmt(arms, e_span, v)])); + let lam = cx.lambda(cx.blk(e_span, [cx.alt_stmt(arms, e_span, v)]/~)); let e_name = cx.lit_str(e_span, e_name); - [#ast(stmt){ $(s).emit_enum($(e_name), $(lam)) }] + [#ast(stmt){ $(s).emit_enum($(e_name), $(lam)) }]/~ } fn deser_enum(cx: ext_ctxt, tps: deser_tps_map, e_name: ast::ident, - e_span: span, variants: [ast::variant], + e_span: span, variants: [ast::variant]/~, -d: @ast::expr) -> @ast::expr { let ext_cx = cx; - let arms: [ast::arm] = vec::from_fn(vec::len(variants)) {|vidx| + let arms: [ast::arm]/~ = vec::from_fn(vec::len(variants)) {|vidx| let variant = variants[vidx]; let v_span = variant.span; let v_name = variant.node.name; @@ -843,7 +848,7 @@ fn deser_enum(cx: ext_ctxt, tps: deser_tps_map, e_name: ast::ident, {pats: [@{id: cx.next_id(), node: ast::pat_lit(cx.lit_uint(v_span, vidx)), - span: v_span}], + span: v_span}]/~, guard: none, body: cx.expr_blk(body)} }; @@ -859,12 +864,12 @@ fn deser_enum(cx: ext_ctxt, tps: deser_tps_map, e_name: ast::ident, } fn enum_fns(cx: ext_ctxt, e_name: ast::ident, e_span: span, - variants: [ast::variant], tps: [ast::ty_param]) - -> [@ast::item] { + variants: [ast::variant]/~, tps: [ast::ty_param]/~) + -> [@ast::item]/~ { [ mk_ser_fn(cx, e_span, e_name, tps, {|a,b,c,d|ser_enum(a, b, e_name, e_span, variants, c, d)}), mk_deser_fn(cx, e_span, e_name, tps, {|a,b,c|deser_enum(a, b, e_name, e_span, variants, c)}) - ] + ]/~ } diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 6c93dbcd7ef..96a1efdfe7a 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -12,7 +12,7 @@ type macro_def = {ident: ast::ident, ext: syntax_extension}; type macro_definer = fn@(ext_ctxt, span, ast::mac_arg, ast::mac_body) -> macro_def; type item_decorator = - fn@(ext_ctxt, span, ast::meta_item, [@ast::item]) -> [@ast::item]; + fn@(ext_ctxt, span, ast::meta_item, [@ast::item]/~) -> [@ast::item]/~; type syntax_expander_tt = {expander: syntax_expander_tt_, span: option}; type syntax_expander_tt_ = fn@(ext_ctxt, span, ast::token_tree) -> @ast::expr; @@ -72,7 +72,7 @@ iface ext_ctxt { fn backtrace() -> expn_info; fn mod_push(mod_name: ast::ident); fn mod_pop(); - fn mod_path() -> [ast::ident]; + fn mod_path() -> [ast::ident]/~; fn bt_push(ei: codemap::expn_info_); fn bt_pop(); fn span_fatal(sp: span, msg: str) -> !; @@ -88,7 +88,7 @@ fn mk_ctxt(parse_sess: parse::parse_sess, type ctxt_repr = {parse_sess: parse::parse_sess, cfg: ast::crate_cfg, mut backtrace: expn_info, - mut mod_path: [ast::ident]}; + mut mod_path: [ast::ident]/~}; impl of ext_ctxt for ctxt_repr { fn codemap() -> codemap { self.parse_sess.cm } fn parse_sess() -> parse::parse_sess { self.parse_sess } @@ -97,7 +97,7 @@ fn mk_ctxt(parse_sess: parse::parse_sess, fn backtrace() -> expn_info { self.backtrace } fn mod_push(i: ast::ident) { vec::push(self.mod_path, i); } fn mod_pop() { vec::pop(self.mod_path); } - fn mod_path() -> [ast::ident] { ret self.mod_path; } + fn mod_path() -> [ast::ident]/~ { ret self.mod_path; } fn bt_push(ei: codemap::expn_info_) { alt ei { expanded_from({call_site: cs, callie: callie}) { @@ -145,7 +145,7 @@ fn mk_ctxt(parse_sess: parse::parse_sess, parse_sess: 
parse_sess, cfg: cfg, mut backtrace: none, - mut mod_path: [] + mut mod_path: []/~ }; ret imp as ext_ctxt } @@ -185,12 +185,12 @@ fn make_new_expr(cx: ext_ctxt, sp: codemap::span, expr: ast::expr_) -> } fn get_mac_args_no_max(cx: ext_ctxt, sp: span, arg: ast::mac_arg, - min: uint, name: str) -> [@ast::expr] { + min: uint, name: str) -> [@ast::expr]/~ { ret get_mac_args(cx, sp, arg, min, none, name); } fn get_mac_args(cx: ext_ctxt, sp: span, arg: ast::mac_arg, - min: uint, max: option, name: str) -> [@ast::expr] { + min: uint, max: option, name: str) -> [@ast::expr]/~ { alt arg { some(expr) { alt expr.node { diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index 4e0c6889092..2e5fe63eb5a 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -28,35 +28,35 @@ fn mk_unary(cx: ext_ctxt, sp: span, op: ast::unop, e: @ast::expr) let expr = ast::expr_unary(op, e); ret @{id: cx.next_id(), node: expr, span: sp}; } -fn mk_path(cx: ext_ctxt, sp: span, idents: [ast::ident]) -> +fn mk_path(cx: ext_ctxt, sp: span, idents: [ast::ident]/~) -> @ast::expr { let path = @{span: sp, global: false, idents: idents, - rp: none, types: []}; + rp: none, types: []/~}; let pathexpr = ast::expr_path(path); ret @{id: cx.next_id(), node: pathexpr, span: sp}; } fn mk_access_(cx: ext_ctxt, sp: span, p: @ast::expr, m: ast::ident) -> @ast::expr { - let expr = ast::expr_field(p, m, []); + let expr = ast::expr_field(p, m, []/~); ret @{id: cx.next_id(), node: expr, span: sp}; } -fn mk_access(cx: ext_ctxt, sp: span, p: [ast::ident], m: ast::ident) +fn mk_access(cx: ext_ctxt, sp: span, p: [ast::ident]/~, m: ast::ident) -> @ast::expr { let pathexpr = mk_path(cx, sp, p); ret mk_access_(cx, sp, pathexpr, m); } fn mk_call_(cx: ext_ctxt, sp: span, fn_expr: @ast::expr, - args: [@ast::expr]) -> @ast::expr { + args: [@ast::expr]/~) -> @ast::expr { let callexpr = ast::expr_call(fn_expr, args, false); ret @{id: cx.next_id(), node: callexpr, span: sp}; } -fn mk_call(cx: ext_ctxt, sp: span, fn_path: [ast::ident], - args: [@ast::expr]) -> @ast::expr { +fn mk_call(cx: ext_ctxt, sp: span, fn_path: [ast::ident]/~, + args: [@ast::expr]/~) -> @ast::expr { let pathexpr = mk_path(cx, sp, fn_path); ret mk_call_(cx, sp, pathexpr, args); } // e = expr, t = type -fn mk_vec_e(cx: ext_ctxt, sp: span, exprs: [@ast::expr]) -> +fn mk_vec_e(cx: ext_ctxt, sp: span, exprs: [@ast::expr]/~) -> @ast::expr { let vecexpr = ast::expr_vec(exprs, ast::m_imm); ret @{id: cx.next_id(), node: vecexpr, span: sp}; @@ -72,15 +72,15 @@ fn mk_uniq_vec_e(cx: ext_ctxt, sp: span, exprs: [@ast::expr]/~) -> } fn mk_rec_e(cx: ext_ctxt, sp: span, - fields: [{ident: ast::ident, ex: @ast::expr}]) -> + fields: [{ident: ast::ident, ex: @ast::expr}]/~) -> @ast::expr { - let mut astfields: [ast::field] = []; + let mut astfields: [ast::field]/~ = []/~; for fields.each {|field| let ident = field.ident; let val = field.ex; let astfield = {node: {mutbl: ast::m_imm, ident: ident, expr: val}, span: sp}; - astfields += [astfield]; + astfields += [astfield]/~; } let recexpr = ast::expr_rec(astfields, option::none::<@ast::expr>); ret @{id: cx.next_id(), node: recexpr, span: sp}; diff --git a/src/libsyntax/ext/concat_idents.rs b/src/libsyntax/ext/concat_idents.rs index faf8e1a0868..9f445218007 100644 --- a/src/libsyntax/ext/concat_idents.rs +++ b/src/libsyntax/ext/concat_idents.rs @@ -9,7 +9,7 @@ fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg, } ret @{id: cx.next_id(), - node: ast::expr_path(@{span: sp, global: false, idents: [@res], - 
rp: none, types: []}), + node: ast::expr_path(@{span: sp, global: false, idents: [@res]/~, + rp: none, types: []/~}), span: sp}; } diff --git a/src/libsyntax/ext/earley_parser.rs b/src/libsyntax/ext/earley_parser.rs index b1a2524ca35..223cca25694 100644 --- a/src/libsyntax/ext/earley_parser.rs +++ b/src/libsyntax/ext/earley_parser.rs @@ -32,11 +32,11 @@ fn is_some(&&mpu: matcher_pos_up) -> bool { } type matcher_pos = ~{ - elts: [ast::matcher], // maybe should be /& ? Need to understand regions. + elts: [ast::matcher]/~, // maybe should be /&? Need to understand regions. sep: option, mut idx: uint, mut up: matcher_pos_up, // mutable for swapping only - matches: [dvec<@arb_depth>] + matches: [dvec<@arb_depth>]/~ }; fn copy_up(&& mpu: matcher_pos_up) -> matcher_pos { @@ -55,26 +55,26 @@ fn count_names(ms: [matcher]/&) -> uint { }}) } -fn new_matcher_pos(ms: [matcher], sep: option) -> matcher_pos { +fn new_matcher_pos(ms: [matcher]/~, sep: option) -> matcher_pos { ~{elts: ms, sep: sep, mut idx: 0u, mut up: matcher_pos_up(none), matches: copy vec::from_fn(count_names(ms), {|_i| dvec::dvec()}) } } /* logically, an arb_depth should contain only one kind of nonterminal */ -enum arb_depth { leaf(whole_nt), seq([@arb_depth]) } +enum arb_depth { leaf(whole_nt), seq([@arb_depth]/~) } type earley_item = matcher_pos; -fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: [matcher]) - -> [@arb_depth] { - let mut cur_eis = []; +fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: [matcher]/~) + -> [@arb_depth]/~ { + let mut cur_eis = []/~; vec::push(cur_eis, new_matcher_pos(ms, none)); loop { - let mut bb_eis = []; // black-box parsed by parser.rs - let mut next_eis = []; // or proceed normally - let mut eof_eis = []; + let mut bb_eis = []/~; // black-box parsed by parser.rs + let mut next_eis = []/~; // or proceed normally + let mut eof_eis = []/~; let {tok: tok, sp: _} = rdr.peek(); @@ -218,12 +218,12 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: [matcher]) fn parse_nt(p: parser, name: str) -> whole_nt { alt name { - "item" { alt p.parse_item([], ast::public) { + "item" { alt p.parse_item([]/~, ast::public) { some(i) { token::w_item(i) } none { p.fatal("expected an item keyword") } }} "block" { token::w_block(p.parse_block()) } - "stmt" { token::w_stmt(p.parse_stmt([])) } + "stmt" { token::w_stmt(p.parse_stmt([]/~)) } "pat" { token::w_pat(p.parse_pat()) } "expr" { token::w_expr(p.parse_expr()) } "ty" { token::w_ty(p.parse_ty(false /* no need to disambiguate*/)) } diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index d7bb7835822..ca5d7f6bab3 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -45,7 +45,7 @@ fn expand_expr(exts: hashmap, cx: ext_ctxt, some(macro_defining(ext)) { let named_extension = ext(cx, pth.span, args, body); exts.insert(*named_extension.ident, named_extension.ext); - (ast::expr_rec([], none), s) + (ast::expr_rec([]/~, none), s) } some(normal_tt(_)) { cx.span_fatal(pth.span, @@ -101,7 +101,7 @@ fn expand_mod_items(exts: hashmap, cx: ext_ctxt, // decorated with "item decorators", then use that function to transform // the item into a new set of items. 
let new_items = vec::flat_map(module.items) {|item| - vec::foldr(item.attrs, [item]) {|attr, items| + vec::foldr(item.attrs, [item]/~) {|attr, items| let mname = alt attr.node.value.node { ast::meta_word(n) { n } ast::meta_name_value(n, _) { n } diff --git a/src/libsyntax/ext/fmt.rs b/src/libsyntax/ext/fmt.rs index 24a5aed7d28..43408dec739 100644 --- a/src/libsyntax/ext/fmt.rs +++ b/src/libsyntax/ext/fmt.rs @@ -34,10 +34,11 @@ fn expand_syntax_ext(cx: ext_ctxt, sp: span, arg: ast::mac_arg, // probably be factored out in common with other code that builds // expressions. Also: Cleanup the naming of these functions. // NOTE: Moved many of the common ones to build.rs --kevina -fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr]) +fn pieces_to_expr(cx: ext_ctxt, sp: span, + pieces: [piece]/~, args: [@ast::expr]/~) -> @ast::expr { - fn make_path_vec(_cx: ext_ctxt, ident: ast::ident) -> [ast::ident] { - ret [@"extfmt", @"rt", ident]; + fn make_path_vec(_cx: ext_ctxt, ident: ast::ident) -> [ast::ident]/~ { + ret [@"extfmt", @"rt", ident]/~; } fn make_rt_path_expr(cx: ext_ctxt, sp: span, ident: ast::ident) -> @ast::expr { @@ -48,8 +49,8 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr]) // which tells the RT::conv* functions how to perform the conversion fn make_rt_conv_expr(cx: ext_ctxt, sp: span, cnv: conv) -> @ast::expr { - fn make_flags(cx: ext_ctxt, sp: span, flags: [flag]) -> @ast::expr { - let mut flagexprs: [@ast::expr] = []; + fn make_flags(cx: ext_ctxt, sp: span, flags: [flag]/~) -> @ast::expr { + let mut flagexprs: [@ast::expr]/~ = []/~; for flags.each {|f| let mut fstr; alt f { @@ -59,7 +60,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr]) flag_sign_always { fstr = "flag_sign_always"; } flag_alternate { fstr = "flag_alternate"; } } - flagexprs += [make_rt_path_expr(cx, sp, @fstr)]; + flagexprs += [make_rt_path_expr(cx, sp, @fstr)]/~; } ret mk_uniq_vec_e(cx, sp, flagexprs); } @@ -71,7 +72,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr]) count_is(c) { let count_lit = mk_int(cx, sp, c); let count_is_path = make_path_vec(cx, @"count_is"); - let count_is_args = [count_lit]; + let count_is_args = [count_lit]/~; ret mk_call(cx, sp, count_is_path, count_is_args); } _ { cx.span_unimpl(sp, "unimplemented #fmt conversion"); } @@ -99,7 +100,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr]) [{ident: @"flags", ex: flags_expr}, {ident: @"width", ex: width_expr}, {ident: @"precision", ex: precision_expr}, - {ident: @"ty", ex: ty_expr}]); + {ident: @"ty", ex: ty_expr}]/~); } let rt_conv_flags = make_flags(cx, sp, cnv.flags); let rt_conv_width = make_count(cx, sp, cnv.width); @@ -113,7 +114,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr]) let fname = "conv_" + conv_type; let path = make_path_vec(cx, @fname); let cnv_expr = make_rt_conv_expr(cx, sp, cnv); - let args = [cnv_expr, arg]; + let args = [cnv_expr, arg]/~; ret mk_call(cx, arg.span, path, args); } fn make_new_conv(cx: ext_ctxt, sp: span, cnv: conv, arg: @ast::expr) -> diff --git a/src/libsyntax/ext/log_syntax.rs b/src/libsyntax/ext/log_syntax.rs index 5ccbb143b97..50fac765483 100644 --- a/src/libsyntax/ext/log_syntax.rs +++ b/src/libsyntax/ext/log_syntax.rs @@ -11,5 +11,6 @@ fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg, ); //trivial expression - ret @{id: cx.next_id(), node: ast::expr_rec([], option::none), span: sp}; + ret 
@{id: cx.next_id(), node: ast::expr_rec([]/~, option::none), + span: sp}; } diff --git a/src/libsyntax/ext/qquote.rs b/src/libsyntax/ext/qquote.rs index caef1841faf..648532d3024 100644 --- a/src/libsyntax/ext/qquote.rs +++ b/src/libsyntax/ext/qquote.rs @@ -35,7 +35,7 @@ impl of qq_helper for @ast::crate { fn visit(cx: aq_ctxt, v: vt) {visit_crate(*self, cx, v);} fn extract_mac() -> option {fail} fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr { - mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_crate"]) + mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_crate"]/~) } fn get_fold_fn() -> str {"fold_crate"} } @@ -49,7 +49,7 @@ impl of qq_helper for @ast::expr { } } fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr { - mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_expr"]) + mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_expr"]/~) } fn get_fold_fn() -> str {"fold_expr"} } @@ -63,7 +63,7 @@ impl of qq_helper for @ast::ty { } } fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr { - mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_ty"]) + mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_ty"]/~) } fn get_fold_fn() -> str {"fold_ty"} } @@ -72,7 +72,7 @@ impl of qq_helper for @ast::item { fn visit(cx: aq_ctxt, v: vt) {visit_item(self, cx, v);} fn extract_mac() -> option {fail} fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr { - mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_item"]) + mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_item"]/~) } fn get_fold_fn() -> str {"fold_item"} } @@ -81,7 +81,7 @@ impl of qq_helper for @ast::stmt { fn visit(cx: aq_ctxt, v: vt) {visit_stmt(self, cx, v);} fn extract_mac() -> option {fail} fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr { - mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_stmt"]) + mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_stmt"]/~) } fn get_fold_fn() -> str {"fold_stmt"} } @@ -90,7 +90,7 @@ impl of qq_helper for @ast::pat { fn visit(cx: aq_ctxt, v: vt) {visit_pat(self, cx, v);} fn extract_mac() -> option {fail} fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr { - mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_pat"]) + mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_pat"]/~) } fn get_fold_fn() -> str {"fold_pat"} } @@ -133,12 +133,12 @@ fn expand_ast(ecx: ext_ctxt, _sp: span, { let mut what = "expr"; option::iter(arg) {|arg| - let args: [@ast::expr] = + let args: [@ast::expr]/~ = alt arg.node { ast::expr_vec(elts, _) { elts } _ { ecx.span_fatal - (_sp, "#ast requires arguments of the form `[...]`.") + (_sp, "#ast requires arguments of the form `[...]/~`.") } }; if vec::len::<@ast::expr>(args) != 1u { @@ -163,14 +163,14 @@ fn expand_ast(ecx: ext_ctxt, _sp: span, }; } -fn parse_crate(p: parser) -> @ast::crate { p.parse_crate_mod([]) } +fn parse_crate(p: parser) -> @ast::crate { p.parse_crate_mod([]/~) } fn parse_ty(p: parser) -> @ast::ty { p.parse_ty(false) } -fn parse_stmt(p: parser) -> @ast::stmt { p.parse_stmt([]) } +fn parse_stmt(p: parser) -> @ast::stmt { p.parse_stmt([]/~) } fn parse_expr(p: parser) -> @ast::expr { p.parse_expr() } fn parse_pat(p: parser) -> @ast::pat { p.parse_pat() } fn parse_item(p: parser) -> @ast::item { - alt p.parse_item([], ast::public) { + alt p.parse_item([]/~, ast::public) { some(item) { item } none { fail "parse_item: parsing an item failed"; } } @@ -230,47 +230,48 @@ fn finish let cx = ecx; let cfg_call = {|| - mk_call_(cx, sp, mk_access(cx, sp, [@"ext_cx"], @"cfg"), []) + mk_call_(cx, sp, mk_access(cx, 
sp, [@"ext_cx"]/~, @"cfg"), []/~) }; let parse_sess_call = {|| - mk_call_(cx, sp, mk_access(cx, sp, [@"ext_cx"], @"parse_sess"), []) + mk_call_(cx, sp, + mk_access(cx, sp, [@"ext_cx"]/~, @"parse_sess"), []/~) }; let pcall = mk_call(cx,sp, [@"syntax", @"parse", @"parser", - @"parse_from_source_str"], + @"parse_from_source_str"]/~, [node.mk_parse_fn(cx,sp), mk_str(cx,sp, fname), mk_call(cx,sp, [@"syntax",@"ext", - @"qquote", @"mk_file_substr"], + @"qquote", @"mk_file_substr"]/~, [mk_str(cx,sp, loc.file.name), mk_uint(cx,sp, loc.line), - mk_uint(cx,sp, loc.col)]), + mk_uint(cx,sp, loc.col)]/~), mk_unary(cx,sp, ast::box(ast::m_imm), mk_str(cx,sp, str2)), cfg_call(), - parse_sess_call()] + parse_sess_call()]/~ ); let mut rcall = pcall; if (g_len > 0u) { rcall = mk_call(cx,sp, - [@"syntax", @"ext", @"qquote", @"replace"], + [@"syntax", @"ext", @"qquote", @"replace"]/~, [pcall, mk_uniq_vec_e(cx,sp, qcx.gather.map_to_vec {|g| mk_call(cx,sp, [@"syntax", @"ext", - @"qquote", @g.constr], - [g.e])}), + @"qquote", @g.constr]/~, + [g.e]/~)}), mk_path(cx,sp, [@"syntax", @"ext", @"qquote", - @node.get_fold_fn()])]); + @node.get_fold_fn()]/~)]/~); } ret rcall; } -fn replace(node: T, repls: [fragment], ff: fn (ast_fold, T) -> T) +fn replace(node: T, repls: [fragment]/~, ff: fn (ast_fold, T) -> T) -> T { let aft = default_ast_fold(); @@ -290,7 +291,7 @@ fn fold_item(f: ast_fold, &&n: @ast::item) -> @ast::item {f.fold_item(n)} fn fold_stmt(f: ast_fold, &&n: @ast::stmt) -> @ast::stmt {f.fold_stmt(n)} fn fold_pat(f: ast_fold, &&n: @ast::pat) -> @ast::pat {f.fold_pat(n)} -fn replace_expr(repls: [fragment], +fn replace_expr(repls: [fragment]/~, e: ast::expr_, s: span, fld: ast_fold, orig: fn@(ast::expr_, span, ast_fold)->(ast::expr_, span)) -> (ast::expr_, span) @@ -304,7 +305,7 @@ fn replace_expr(repls: [fragment], } } -fn replace_ty(repls: [fragment], +fn replace_ty(repls: [fragment]/~, e: ast::ty_, s: span, fld: ast_fold, orig: fn@(ast::ty_, span, ast_fold)->(ast::ty_, span)) -> (ast::ty_, span) diff --git a/src/libsyntax/ext/simplext.rs b/src/libsyntax/ext/simplext.rs index 4dad13dc06a..c29b2246993 100644 --- a/src/libsyntax/ext/simplext.rs +++ b/src/libsyntax/ext/simplext.rs @@ -22,7 +22,7 @@ fn path_to_ident(pth: @path) -> option { type clause = {params: binders, body: @expr}; /* logically, an arb_depth should contain only one kind of matchable */ -enum arb_depth { leaf(T), seq(@[arb_depth], span), } +enum arb_depth { leaf(T), seq(@[arb_depth]/~, span), } enum matchable { @@ -70,8 +70,8 @@ fn match_error(cx: ext_ctxt, m: matchable, expected: str) -> ! 
{ type match_result = option>; type selector = fn@(matchable) -> match_result; -fn elts_to_ell(cx: ext_ctxt, elts: [@expr]) -> - {pre: [@expr], rep: option<@expr>, post: [@expr]} { +fn elts_to_ell(cx: ext_ctxt, elts: [@expr]/~) -> + {pre: [@expr]/~, rep: option<@expr>, post: [@expr]/~} { let mut idx: uint = 0u; let mut res = none; for elts.each {|elt| @@ -96,15 +96,15 @@ fn elts_to_ell(cx: ext_ctxt, elts: [@expr]) -> } ret alt res { some(val) { val } - none { {pre: elts, rep: none, post: []} } + none { {pre: elts, rep: none, post: []/~} } } } -fn option_flatten_map(f: fn@(T) -> option, v: [T]) -> - option<[U]> { - let mut res = []; +fn option_flatten_map(f: fn@(T) -> option, v: [T]/~) -> + option<[U]/~> { + let mut res = []/~; for v.each {|elem| - alt f(elem) { none { ret none; } some(fv) { res += [fv]; } } + alt f(elem) { none { ret none; } some(fv) { res += [fv]/~; } } } ret some(res); } @@ -182,7 +182,7 @@ fn use_selectors_to_bind(b: binders, e: @expr) -> option { /* use the bindings on the body to generate the expanded code */ fn transcribe(cx: ext_ctxt, b: bindings, body: @expr) -> @expr { - let idx_path: @mut [uint] = @mut []; + let idx_path: @mut [uint]/~ = @mut []/~; fn new_id(_old: node_id, cx: ext_ctxt) -> node_id { ret cx.next_id(); } fn new_span(cx: ext_ctxt, sp: span) -> span { /* this discards information in the case of macro-defining macros */ @@ -214,7 +214,7 @@ fn transcribe(cx: ext_ctxt, b: bindings, body: @expr) -> @expr { /* helper: descend into a matcher */ -fn follow(m: arb_depth, idx_path: @mut [uint]) -> +fn follow(m: arb_depth, idx_path: @mut [uint]/~) -> arb_depth { let mut res: arb_depth = m; for vec::each(*idx_path) {|idx| @@ -227,7 +227,7 @@ fn follow(m: arb_depth, idx_path: @mut [uint]) -> } fn follow_for_trans(cx: ext_ctxt, mmaybe: option>, - idx_path: @mut [uint]) -> option { + idx_path: @mut [uint]/~) -> option { alt mmaybe { none { ret none } some(m) { @@ -264,8 +264,9 @@ fn free_vars(b: bindings, e: @expr, it: fn(ident)) { /* handle sequences (anywhere in the AST) of exprs, either real or ...ed */ -fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mut [uint], - recur: fn@(&&@expr) -> @expr, exprs: [@expr]) -> [@expr] { +fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mut [uint]/~, + recur: fn@(&&@expr) -> @expr, + exprs: [@expr]/~) -> [@expr]/~ { alt elts_to_ell(cx, exprs) { {pre: pre, rep: repeat_me_maybe, post: post} { let mut res = vec::map(pre, recur); @@ -308,8 +309,8 @@ fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mut [uint], /* Whew, we now know how how many times to repeat */ let mut idx: uint = 0u; while idx < rc { - *idx_path += [idx]; - res += [recur(repeat_me)]; // whew! + *idx_path += [idx]/~; + res += [recur(repeat_me)]/~; // whew! vec::pop(*idx_path); idx += 1u; } @@ -326,7 +327,7 @@ fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mut [uint], // substitute, in a position that's required to be an ident -fn transcribe_ident(cx: ext_ctxt, b: bindings, idx_path: @mut [uint], +fn transcribe_ident(cx: ext_ctxt, b: bindings, idx_path: @mut [uint]/~, &&i: ident, _fld: ast_fold) -> ident { ret alt follow_for_trans(cx, b.find(i), idx_path) { some(match_ident(a_id)) { a_id.node } @@ -336,14 +337,14 @@ fn transcribe_ident(cx: ext_ctxt, b: bindings, idx_path: @mut [uint], } -fn transcribe_path(cx: ext_ctxt, b: bindings, idx_path: @mut [uint], +fn transcribe_path(cx: ext_ctxt, b: bindings, idx_path: @mut [uint]/~, p: path, _fld: ast_fold) -> path { // Don't substitute into qualified names. 
if vec::len(p.types) > 0u || vec::len(p.idents) != 1u { ret p; } alt follow_for_trans(cx, b.find(p.idents[0]), idx_path) { some(match_ident(id)) { - {span: id.span, global: false, idents: [id.node], - rp: none, types: []} + {span: id.span, global: false, idents: [id.node]/~, + rp: none, types: []/~} } some(match_path(a_pth)) { *a_pth } some(m) { match_error(cx, m, "a path") } @@ -352,7 +353,7 @@ fn transcribe_path(cx: ext_ctxt, b: bindings, idx_path: @mut [uint], } -fn transcribe_expr(cx: ext_ctxt, b: bindings, idx_path: @mut [uint], +fn transcribe_expr(cx: ext_ctxt, b: bindings, idx_path: @mut [uint]/~, e: ast::expr_, s: span, fld: ast_fold, orig: fn@(ast::expr_, span, ast_fold)->(ast::expr_, span)) -> (ast::expr_, span) @@ -367,9 +368,9 @@ fn transcribe_expr(cx: ext_ctxt, b: bindings, idx_path: @mut [uint], some(match_ident(id)) { (expr_path(@{span: id.span, global: false, - idents: [id.node], + idents: [id.node]/~, rp: none, - types: []}), id.span) + types: []/~}), id.span) } some(match_path(a_pth)) { (expr_path(a_pth), s) } some(match_expr(a_exp)) { (a_exp.node, a_exp.span) } @@ -381,7 +382,7 @@ fn transcribe_expr(cx: ext_ctxt, b: bindings, idx_path: @mut [uint], } } -fn transcribe_type(cx: ext_ctxt, b: bindings, idx_path: @mut [uint], +fn transcribe_type(cx: ext_ctxt, b: bindings, idx_path: @mut [uint]/~, t: ast::ty_, s: span, fld: ast_fold, orig: fn@(ast::ty_, span, ast_fold) -> (ast::ty_, span)) -> (ast::ty_, span) @@ -407,7 +408,7 @@ fn transcribe_type(cx: ext_ctxt, b: bindings, idx_path: @mut [uint], /* for parsing reasons, syntax variables bound to blocks must be used like `{v}` */ -fn transcribe_block(cx: ext_ctxt, b: bindings, idx_path: @mut [uint], +fn transcribe_block(cx: ext_ctxt, b: bindings, idx_path: @mut [uint]/~, blk: blk_, s: span, fld: ast_fold, orig: fn@(blk_, span, ast_fold) -> (blk_, span)) -> (blk_, span) @@ -458,7 +459,7 @@ fn p_t_s_rec(cx: ext_ctxt, m: matchable, s: selector, b: binders) { } } {pre: pre, rep: none, post: post} { - if post != [] { + if post != []/~ { cx.bug("elts_to_ell provided an invalid result"); } p_t_s_r_length(cx, vec::len(pre), false, s, b); @@ -606,10 +607,10 @@ fn p_t_s_r_ellipses(cx: ext_ctxt, repeat_me: @expr, offset: uint, s: selector, match_expr(e) { alt e.node { expr_vec(arg_elts, _) { - let mut elts = []; + let mut elts = []/~; let mut idx = offset; while idx < vec::len(arg_elts) { - elts += [leaf(match_expr(arg_elts[idx]))]; + vec::push(elts, leaf(match_expr(arg_elts[idx]))); idx += 1u; } @@ -651,7 +652,7 @@ fn p_t_s_r_length(cx: ext_ctxt, len: uint, at_least: bool, s: selector, compose_sels(s, {|x|len_select(cx, x, at_least, len)})); } -fn p_t_s_r_actual_vector(cx: ext_ctxt, elts: [@expr], _repeat_after: bool, +fn p_t_s_r_actual_vector(cx: ext_ctxt, elts: [@expr]/~, _repeat_after: bool, s: selector, b: binders) { let mut idx: uint = 0u; while idx < vec::len(elts) { @@ -679,14 +680,14 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg, let args = get_mac_args_no_max(cx, sp, arg, 0u, "macro"); let mut macro_name: option<@str> = none; - let mut clauses: [@clause] = []; + let mut clauses: [@clause]/~ = []/~; for args.each {|arg| alt arg.node { expr_vec(elts, mutbl) { if vec::len(elts) != 2u { cx.span_fatal((*arg).span, "extension clause must consist of [" + - "macro invocation, expansion body]"); + "macro invocation, expansion body]/~"); } @@ -719,7 +720,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg, }; clauses += [@{params: pattern_to_selectors(cx, arg), - body: elts[1u]}]; + body: elts[1u]}]/~; 
// FIXME (#2251): check duplicates (or just simplify // the macro arg situation) @@ -739,7 +740,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg, } _ { cx.span_fatal((*arg).span, - "extension must be [clause, " + " ...]"); + "extension must be [clause, " + " ...]/~"); } } } @@ -759,7 +760,8 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg, ext: normal({expander: ext, span: some(option::get(arg).span)})}; fn generic_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg, - _body: ast::mac_body, clauses: [@clause]) -> @expr { + _body: ast::mac_body, + clauses: [@clause]/~) -> @expr { let arg = alt arg { some(arg) { arg } none { cx.span_fatal(sp, "macro must have arguments")} diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index 492210a55b8..d1ee7a16a47 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -41,7 +41,7 @@ iface ast_fold { fn fold_ident(&&ident) -> ident; fn fold_path(&&@path) -> @path; fn fold_local(&&@local) -> @local; - fn map_exprs(fn@(&&@expr) -> @expr, [@expr]) -> [@expr]; + fn map_exprs(fn@(&&@expr) -> @expr, [@expr]/~) -> [@expr]/~; fn new_id(node_id) -> node_id; fn new_span(span) -> span; } @@ -75,7 +75,7 @@ type ast_fold_precursor = @{ fold_ident: fn@(&&ident, ast_fold) -> ident, fold_path: fn@(path, ast_fold) -> path, fold_local: fn@(local_, span, ast_fold) -> (local_, span), - map_exprs: fn@(fn@(&&@expr) -> @expr, [@expr]) -> [@expr], + map_exprs: fn@(fn@(&&@expr) -> @expr, [@expr]/~) -> [@expr]/~, new_id: fn@(node_id) -> node_id, new_span: fn@(span) -> span}; @@ -151,7 +151,7 @@ fn fold_ty_param(tp: ty_param, fld: ast_fold) -> ty_param { bounds: @vec::map(*tp.bounds, {|x|fold_ty_param_bound(x, fld)})} } -fn fold_ty_params(tps: [ty_param], fld: ast_fold) -> [ty_param] { +fn fold_ty_params(tps: [ty_param]/~, fld: ast_fold) -> [ty_param]/~ { vec::map(tps, {|x|fold_ty_param(x, fld)}) } @@ -335,10 +335,11 @@ fn noop_fold_pat(p: pat_, fld: ast_fold) -> pat_ { {|pats| vec::map(pats, fld.fold_pat)}) } pat_rec(fields, etc) { - let mut fs = []; + let mut fs = []/~; for fields.each {|f| - fs += [{ident: /* FIXME (#2543) */ copy f.ident, - pat: fld.fold_pat(f.pat)}]; + vec::push(fs, + {ident: /* FIXME (#2543) */ copy f.ident, + pat: fld.fold_pat(f.pat)}); } pat_rec(fs, etc) } @@ -570,7 +571,7 @@ fn noop_fold_local(l: local_, fld: ast_fold) -> local_ { /* temporarily eta-expand because of a compiler bug with using `fn` as a value */ -fn noop_map_exprs(f: fn@(&&@expr) -> @expr, es: [@expr]) -> [@expr] { +fn noop_map_exprs(f: fn@(&&@expr) -> @expr, es: [@expr]/~) -> [@expr]/~ { ret vec::map(es, f); } @@ -717,7 +718,7 @@ impl of ast_fold for ast_fold_precursor { let (n, s) = self.fold_local(x.node, x.span, self as ast_fold); ret @{node: n, span: self.new_span(s)}; } - fn map_exprs(f: fn@(&&@expr) -> @expr, e: [@expr]) -> [@expr] { + fn map_exprs(f: fn@(&&@expr) -> @expr, e: [@expr]/~) -> [@expr]/~ { self.map_exprs(f, e) } fn new_id(node_id: ast::node_id) -> node_id { diff --git a/src/libsyntax/parse.rs b/src/libsyntax/parse.rs index d062f4bde6d..2e309f2bd14 100644 --- a/src/libsyntax/parse.rs +++ b/src/libsyntax/parse.rs @@ -119,7 +119,8 @@ fn parse_expr_from_source_str(name: str, source: @str, cfg: ast::crate_cfg, } fn parse_item_from_source_str(name: str, source: @str, cfg: ast::crate_cfg, - +attrs: [ast::attribute], vis: ast::visibility, + +attrs: [ast::attribute]/~, + vis: ast::visibility, sess: parse_sess) -> option<@ast::item> { let (p, rdr) = new_parser_etc_from_source_str(sess, cfg, name, codemap::fss_none, source); 
@@ -197,7 +198,7 @@ fn new_parser_from_file(sess: parse_sess, cfg: ast::crate_cfg, +path: str, } fn new_parser_from_tt(sess: parse_sess, cfg: ast::crate_cfg, - tt: [ast::token_tree]) -> parser { + tt: [ast::token_tree]/~) -> parser { let trdr = lexer::new_tt_reader(sess.span_diagnostic, sess.interner, tt); ret parser(sess, cfg, trdr as reader, parser::SOURCE_FILE) } diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index dad180847ee..4d78bcdc0a9 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -7,11 +7,11 @@ export parser_attr; // A type to distingush between the parsing of item attributes or syntax // extensions, which both begin with token.POUND -type attr_or_ext = option>; +type attr_or_ext = option>; impl parser_attr for parser { - fn parse_outer_attrs_or_ext(first_item_attrs: [ast::attribute]) + fn parse_outer_attrs_or_ext(first_item_attrs: [ast::attribute]/~) -> attr_or_ext { let expect_item_next = vec::is_not_empty(first_item_attrs); @@ -21,7 +21,8 @@ impl parser_attr for parser { self.bump(); let first_attr = self.parse_attribute_naked(ast::attr_outer, lo); - ret some(left([first_attr] + self.parse_outer_attributes())); + ret some(left([first_attr]/~ + + self.parse_outer_attributes())); } else if !(self.look_ahead(1u) == token::LT || self.look_ahead(1u) == token::LBRACKET || self.look_ahead(1u) == token::POUND @@ -33,11 +34,11 @@ impl parser_attr for parser { } // Parse attributes that appear before an item - fn parse_outer_attributes() -> [ast::attribute] { - let mut attrs: [ast::attribute] = []; + fn parse_outer_attributes() -> [ast::attribute]/~ { + let mut attrs: [ast::attribute]/~ = []/~; while self.token == token::POUND && self.look_ahead(1u) == token::LBRACKET { - attrs += [self.parse_attribute(ast::attr_outer)]; + attrs += [self.parse_attribute(ast::attr_outer)]/~; } ret attrs; } @@ -64,9 +65,9 @@ impl parser_attr for parser { // is an inner attribute of the containing item or an outer attribute of // the first contained item until we see the semi). 
fn parse_inner_attrs_and_next() -> - {inner: [ast::attribute], next: [ast::attribute]} { - let mut inner_attrs: [ast::attribute] = []; - let mut next_outer_attrs: [ast::attribute] = []; + {inner: [ast::attribute]/~, next: [ast::attribute]/~} { + let mut inner_attrs: [ast::attribute]/~ = []/~; + let mut next_outer_attrs: [ast::attribute]/~ = []/~; while self.token == token::POUND { if self.look_ahead(1u) != token::LBRACKET { // This is an extension @@ -75,13 +76,13 @@ impl parser_attr for parser { let attr = self.parse_attribute(ast::attr_inner); if self.token == token::SEMI { self.bump(); - inner_attrs += [attr]; + inner_attrs += [attr]/~; } else { // It's not really an inner attribute let outer_attr = spanned(attr.span.lo, attr.span.hi, {style: ast::attr_outer, value: attr.node.value}); - next_outer_attrs += [outer_attr]; + next_outer_attrs += [outer_attr]/~; break; } } @@ -110,15 +111,15 @@ impl parser_attr for parser { } } - fn parse_meta_seq() -> [@ast::meta_item] { + fn parse_meta_seq() -> [@ast::meta_item]/~ { ret self.parse_seq(token::LPAREN, token::RPAREN, seq_sep_trailing_disallowed(token::COMMA), {|p| p.parse_meta_item()}).node; } - fn parse_optional_meta() -> [@ast::meta_item] { + fn parse_optional_meta() -> [@ast::meta_item]/~ { alt self.token { token::LPAREN { ret self.parse_meta_seq(); } - _ { ret []; } } + _ { ret []/~; } } } } diff --git a/src/libsyntax/parse/comments.rs b/src/libsyntax/parse/comments.rs index 54d14f2eaf4..2f10a30bd55 100644 --- a/src/libsyntax/parse/comments.rs +++ b/src/libsyntax/parse/comments.rs @@ -16,7 +16,7 @@ enum cmnt_style { blank_line, // Just a manual blank line "\n\n", for layout } -type cmnt = {style: cmnt_style, lines: [str], pos: uint}; +type cmnt = {style: cmnt_style, lines: [str]/~, pos: uint}; fn read_to_eol(rdr: string_reader) -> str { let mut val = ""; @@ -41,14 +41,14 @@ fn consume_non_eol_whitespace(rdr: string_reader) { } } -fn push_blank_line_comment(rdr: string_reader, &comments: [cmnt]) { +fn push_blank_line_comment(rdr: string_reader, &comments: [cmnt]/~) { #debug(">>> blank-line comment"); - let v: [str] = []; - comments += [{style: blank_line, lines: v, pos: rdr.chpos}]; + let v: [str]/~ = []/~; + comments += [{style: blank_line, lines: v, pos: rdr.chpos}]/~; } fn consume_whitespace_counting_blank_lines(rdr: string_reader, - &comments: [cmnt]) { + &comments: [cmnt]/~) { while is_whitespace(rdr.curr) && !is_eof(rdr) { if rdr.col == 0u && rdr.curr == '\n' { push_blank_line_comment(rdr, comments); @@ -62,18 +62,18 @@ fn read_shebang_comment(rdr: string_reader, code_to_the_left: bool) -> cmnt { let p = rdr.chpos; #debug("<<< shebang comment"); ret {style: if code_to_the_left { trailing } else { isolated }, - lines: [read_one_line_comment(rdr)], + lines: [read_one_line_comment(rdr)]/~, pos: p}; } fn read_line_comments(rdr: string_reader, code_to_the_left: bool) -> cmnt { #debug(">>> line comments"); let p = rdr.chpos; - let mut lines: [str] = []; + let mut lines: [str]/~ = []/~; while rdr.curr == '/' && nextch(rdr) == '/' { let line = read_one_line_comment(rdr); log(debug, line); - lines += [line]; + lines += [line]/~; consume_non_eol_whitespace(rdr); } #debug("<<< line comments"); @@ -88,7 +88,7 @@ fn all_whitespace(s: str, begin: uint, end: uint) -> bool { ret true; } -fn trim_whitespace_prefix_and_push_line(&lines: [str], +fn trim_whitespace_prefix_and_push_line(&lines: [str]/~, s: str, col: uint) unsafe { let mut s1; let len = str::len(s); @@ -98,13 +98,13 @@ fn trim_whitespace_prefix_and_push_line(&lines: [str], } else { s1 = 
""; } } else { s1 = s; } log(debug, "pushing line: " + s1); - lines += [s1]; + lines += [s1]/~; } fn read_block_comment(rdr: string_reader, code_to_the_left: bool) -> cmnt { #debug(">>> block comment"); let p = rdr.chpos; - let mut lines: [str] = []; + let mut lines: [str]/~ = []/~; let mut col: uint = rdr.col; bump(rdr); bump(rdr); @@ -153,14 +153,14 @@ fn peeking_at_comment(rdr: string_reader) -> bool { } fn consume_comment(rdr: string_reader, code_to_the_left: bool, - &comments: [cmnt]) { + &comments: [cmnt]/~) { #debug(">>> consume comment"); if rdr.curr == '/' && nextch(rdr) == '/' { - comments += [read_line_comments(rdr, code_to_the_left)]; + comments += [read_line_comments(rdr, code_to_the_left)]/~; } else if rdr.curr == '/' && nextch(rdr) == '*' { - comments += [read_block_comment(rdr, code_to_the_left)]; + comments += [read_block_comment(rdr, code_to_the_left)]/~; } else if rdr.curr == '#' && nextch(rdr) == '!' { - comments += [read_shebang_comment(rdr, code_to_the_left)]; + comments += [read_shebang_comment(rdr, code_to_the_left)]/~; } else { fail; } #debug("<<< consume comment"); } @@ -170,7 +170,7 @@ type lit = {lit: str, pos: uint}; fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler, path: str, srdr: io::reader) -> - {cmnts: [cmnt], lits: [lit]} { + {cmnts: [cmnt]/~, lits: [lit]/~} { let src = @str::from_bytes(srdr.read_whole_stream()); let itr = @interner::mk::<@str>( {|x|str::hash(*x)}, @@ -179,8 +179,8 @@ fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler, let rdr = lexer::new_low_level_string_reader (span_diagnostic, codemap::new_filemap(path, src, 0u, 0u), itr); - let mut comments: [cmnt] = []; - let mut literals: [lit] = []; + let mut comments: [cmnt]/~ = []/~; + let mut literals: [lit]/~ = []/~; let mut first_read: bool = true; while !is_eof(rdr) { loop { diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs index 1d92561a108..8cc6f3d6484 100644 --- a/src/libsyntax/parse/common.rs +++ b/src/libsyntax/parse/common.rs @@ -149,9 +149,9 @@ impl parser_common for parser { } fn parse_seq_to_before_gt(sep: option, - f: fn(parser) -> T) -> [T] { + f: fn(parser) -> T) -> [T]/~ { let mut first = true; - let mut v = []; + let mut v = []/~; while self.token != token::GT && self.token != token::BINOP(token::SHR) { alt sep { @@ -166,7 +166,7 @@ impl parser_common for parser { } fn parse_seq_to_gt(sep: option, - f: fn(parser) -> T) -> [T] { + f: fn(parser) -> T) -> [T]/~ { let v = self.parse_seq_to_before_gt(sep, f); self.expect_gt(); @@ -174,7 +174,7 @@ impl parser_common for parser { } fn parse_seq_lt_gt(sep: option, - f: fn(parser) -> T) -> spanned<[T]> { + f: fn(parser) -> T) -> spanned<[T]/~> { let lo = self.span.lo; self.expect(token::LT); let result = self.parse_seq_to_before_gt::(sep, f); @@ -184,7 +184,7 @@ impl parser_common for parser { } fn parse_seq_to_end(ket: token::token, sep: seq_sep, - f: fn(parser) -> T) -> [T] { + f: fn(parser) -> T) -> [T]/~ { let val = self.parse_seq_to_before_end(ket, sep, f); self.bump(); ret val; @@ -192,9 +192,9 @@ impl parser_common for parser { fn parse_seq_to_before_end(ket: token::token, sep: seq_sep, - f: fn(parser) -> T) -> [T] { + f: fn(parser) -> T) -> [T]/~ { let mut first: bool = true; - let mut v: [T] = []; + let mut v: [T]/~ = []/~; while self.token != ket { alt sep.sep { some(t) { if first { first = false; } @@ -207,8 +207,10 @@ impl parser_common for parser { ret v; } - fn parse_unspanned_seq(bra: token::token, ket: token::token, - sep: seq_sep, f: fn(parser) -> 
T) -> [T] { + fn parse_unspanned_seq(bra: token::token, + ket: token::token, + sep: seq_sep, + f: fn(parser) -> T) -> [T]/~ { self.expect(bra); let result = self.parse_seq_to_before_end::(ket, sep, f); self.bump(); @@ -218,7 +220,7 @@ impl parser_common for parser { // NB: Do not use this function unless you actually plan to place the // spanned list in the AST. fn parse_seq(bra: token::token, ket: token::token, sep: seq_sep, - f: fn(parser) -> T) -> spanned<[T]> { + f: fn(parser) -> T) -> spanned<[T]/~> { let lo = self.span.lo; self.expect(bra); let result = self.parse_seq_to_before_end::(ket, sep, f); diff --git a/src/libsyntax/parse/eval.rs b/src/libsyntax/parse/eval.rs index ae11c883443..f1dd8d69cc1 100644 --- a/src/libsyntax/parse/eval.rs +++ b/src/libsyntax/parse/eval.rs @@ -7,24 +7,26 @@ type ctx = @{sess: parse::parse_sess, cfg: ast::crate_cfg}; -fn eval_crate_directives(cx: ctx, cdirs: [@ast::crate_directive], prefix: str, - &view_items: [@ast::view_item], - &items: [@ast::item]) { +fn eval_crate_directives(cx: ctx, + cdirs: [@ast::crate_directive]/~, + prefix: str, + &view_items: [@ast::view_item]/~, + &items: [@ast::item]/~) { for cdirs.each {|sub_cdir| eval_crate_directive(cx, sub_cdir, prefix, view_items, items); } } -fn eval_crate_directives_to_mod(cx: ctx, cdirs: [@ast::crate_directive], +fn eval_crate_directives_to_mod(cx: ctx, cdirs: [@ast::crate_directive]/~, prefix: str, suffix: option) - -> (ast::_mod, [ast::attribute]) { + -> (ast::_mod, [ast::attribute]/~) { #debug("eval crate prefix: %s", prefix); #debug("eval crate suffix: %s", option::get_default(suffix, "none")); let (cview_items, citems, cattrs) = parse_companion_mod(cx, prefix, suffix); - let mut view_items: [@ast::view_item] = []; - let mut items: [@ast::item] = []; + let mut view_items: [@ast::view_item]/~ = []/~; + let mut items: [@ast::item]/~ = []/~; eval_crate_directives(cx, cdirs, prefix, view_items, items); ret ({view_items: view_items + cview_items, items: items + citems}, @@ -42,7 +44,7 @@ We build the path to the companion mod by combining the prefix and the optional suffix then adding the .rs extension. 
*/ fn parse_companion_mod(cx: ctx, prefix: str, suffix: option) - -> ([@ast::view_item], [@ast::item], [ast::attribute]) { + -> ([@ast::view_item]/~, [@ast::item]/~, [ast::attribute]/~) { fn companion_file(+prefix: str, suffix: option) -> str { ret alt suffix { @@ -72,11 +74,11 @@ fn parse_companion_mod(cx: ctx, prefix: str, suffix: option) cx.sess.byte_pos = cx.sess.byte_pos + r0.pos; ret (m0.view_items, m0.items, inner_attrs.inner); } else { - ret ([], [], []); + ret ([]/~, []/~, []/~); } } -fn cdir_path_opt(id: ast::ident, attrs: [ast::attribute]) -> @str { +fn cdir_path_opt(id: ast::ident, attrs: [ast::attribute]/~) -> @str { alt ::attr::first_attr_value_str_by_name(attrs, "path") { some(d) { ret d; @@ -86,8 +88,8 @@ fn cdir_path_opt(id: ast::ident, attrs: [ast::attribute]) -> @str { } fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: str, - &view_items: [@ast::view_item], - &items: [@ast::item]) { + &view_items: [@ast::view_item]/~, + &items: [@ast::item]/~) { alt cdir.node { ast::cdir_src_mod(id, attrs) { let file_path = cdir_path_opt(@(*id + ".rs"), attrs); @@ -108,7 +110,7 @@ fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: str, // Thread defids, chpos and byte_pos through the parsers cx.sess.chpos = r0.chpos; cx.sess.byte_pos = cx.sess.byte_pos + r0.pos; - items += [i]; + items += [i]/~; } ast::cdir_dir_mod(id, cdirs, attrs) { let path = cdir_path_opt(id, attrs); @@ -126,9 +128,9 @@ fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: str, vis: ast::public, span: cdir.span}; cx.sess.next_id += 1; - items += [i]; + items += [i]/~; } - ast::cdir_view_item(vi) { view_items += [vi]; } + ast::cdir_view_item(vi) { view_items += [vi]/~; } ast::cdir_syntax(pth) { } } } diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs index 5a3dceace8d..8687e011635 100644 --- a/src/libsyntax/parse/lexer.rs +++ b/src/libsyntax/parse/lexer.rs @@ -26,7 +26,7 @@ enum tt_frame_up { /* to break a circularity */ /* TODO: figure out how to have a uniquely linked stack, and change to `~` */ #[doc = "an unzipping of `token_tree`s"] type tt_frame = @{ - readme: [ast::token_tree], + readme: [ast::token_tree]/~, mut idx: uint, up: tt_frame_up }; @@ -41,7 +41,7 @@ type tt_reader = @{ }; fn new_tt_reader(span_diagnostic: diagnostic::span_handler, - itr: @interner::interner<@str>, src: [ast::token_tree]) + itr: @interner::interner<@str>, src: [ast::token_tree]/~) -> tt_reader { let r = @{span_diagnostic: span_diagnostic, interner: itr, mut cur: @{readme: src, mut idx: 0u, diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 13b68b2ce70..d0847a974b7 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -13,6 +13,7 @@ import common::{seq_sep_trailing_disallowed, seq_sep_trailing_allowed, seq_sep_none, token_to_str}; import common::*;//{parser_common}; import dvec::{dvec, extensions}; +import vec::{push}; export file_type; export parser; @@ -51,10 +52,10 @@ enum pexpr { */ enum class_contents { ctor_decl(fn_decl, blk, codemap::span), dtor_decl(blk, codemap::span), - members([@class_member]) } + members([@class_member]/~) } type arg_or_capture_item = either; -type item_info = (ident, item_, option<[attribute]>); +type item_info = (ident, item_, option<[attribute]/~>); class parser { let sess: parse_sess; @@ -176,14 +177,14 @@ class parser { // functions can't have constrained types. Not sure whether // that would be desirable anyway. See bug for the story on // constrained types. 
- let constrs: [@constr] = []; + let constrs: [@constr]/~ = []/~; let (ret_style, ret_ty) = self.parse_ret_ty(); ret {inputs: inputs, output: ret_ty, purity: purity, cf: ret_style, constraints: constrs}; } - fn parse_ty_methods() -> [ty_method] { + fn parse_ty_methods() -> [ty_method]/~ { self.parse_unspanned_seq(token::LBRACE, token::RBRACE, seq_sep_none()) { |p| let attrs = p.parse_outer_attributes(); @@ -215,7 +216,7 @@ class parser { // if i is the jth ident in args, return j // otherwise, fail - fn ident_index(args: [arg], i: ident) -> uint { + fn ident_index(args: [arg]/~, i: ident) -> uint { let mut j = 0u; for args.each {|a| if a.ident == i { ret j; } j += 1u; } self.fatal("unbound variable `" + *i + "` in constraint arg"); @@ -235,7 +236,7 @@ class parser { ret @{node: carg, span: sp}; } - fn parse_constr_arg(args: [arg]) -> @constr_arg { + fn parse_constr_arg(args: [arg]/~) -> @constr_arg { let sp = self.span; let mut carg = carg_base; if self.token == token::BINOP(token::STAR) { @@ -247,7 +248,7 @@ class parser { ret @{node: carg, span: sp}; } - fn parse_ty_constr(fn_args: [arg]) -> @constr { + fn parse_ty_constr(fn_args: [arg]/~) -> @constr { let lo = self.span.lo; let path = self.parse_path_without_tps(); let args = self.parse_unspanned_seq( @@ -261,7 +262,7 @@ class parser { fn parse_constr_in_type() -> @ty_constr { let lo = self.span.lo; let path = self.parse_path_without_tps(); - let args: [@ty_constr_arg] = self.parse_unspanned_seq( + let args: [@ty_constr_arg]/~ = self.parse_unspanned_seq( token::LPAREN, token::RPAREN, seq_sep_trailing_disallowed(token::COMMA), {|p| p.parse_type_constr_arg()}); @@ -272,17 +273,17 @@ class parser { fn parse_constrs(pser: fn(parser) -> @constr_general) -> - [@constr_general] { - let mut constrs: [@constr_general] = []; + [@constr_general]/~ { + let mut constrs: [@constr_general]/~ = []/~; loop { let constr = pser(self); - constrs += [constr]; + constrs += [constr]/~; if self.token == token::COMMA { self.bump(); } else { ret constrs; } }; } - fn parse_type_constraints() -> [@ty_constr] { + fn parse_type_constraints() -> [@ty_constr]/~ { ret self.parse_constrs({|p| p.parse_constr_in_type()}); } @@ -359,10 +360,10 @@ class parser { self.bump(); ty_nil } else { - let mut ts = [self.parse_ty(false)]; + let mut ts = [self.parse_ty(false)]/~; while self.token == token::COMMA { self.bump(); - ts += [self.parse_ty(false)]; + ts += [self.parse_ty(false)]/~; } let t = if vec::len(ts) == 1u { ts[0].node } else { ty_tup(ts) }; @@ -583,22 +584,22 @@ class parser { let lo = self.span.lo; let global = self.eat(token::MOD_SEP); - let mut ids = []; + let mut ids = []/~; loop { let is_not_last = self.look_ahead(2u) != token::LT && self.look_ahead(1u) == token::MOD_SEP; if is_not_last { - ids += [parse_ident(self)]; + ids += [parse_ident(self)]/~; self.expect(token::MOD_SEP); } else { - ids += [parse_last_ident(self)]; + ids += [parse_last_ident(self)]/~; break; } } @{span: mk_sp(lo, self.last_span.hi), global: global, - idents: ids, rp: none, types: []} + idents: ids, rp: none, types: []/~} } fn parse_value_path() -> @path { @@ -639,7 +640,7 @@ class parser { self.parse_seq_lt_gt(some(token::COMMA), {|p| p.parse_ty(false)}) } else { - {node: [], span: path.span} + {node: []/~, span: path.span} } }; @@ -715,9 +716,9 @@ class parser { let lit = @spanned(lo, hi, lit_nil); ret self.mk_pexpr(lo, hi, expr_lit(lit)); } - let mut es = [self.parse_expr()]; + let mut es = [self.parse_expr()]/~; while self.token == token::COMMA { - self.bump(); es += 
[self.parse_expr()]; + self.bump(); es += [self.parse_expr()]/~; } hi = self.span.hi; self.expect(token::RPAREN); @@ -733,7 +734,7 @@ class parser { if self.is_keyword("mut") || is_plain_ident(self.token) && self.look_ahead(1u) == token::COLON { - let mut fields = [self.parse_field(token::COLON)]; + let mut fields = [self.parse_field(token::COLON)]/~; let mut base = none; while self.token != token::RBRACE { // optional comma before "with" @@ -750,7 +751,7 @@ class parser { // record ends by an optional trailing comma break; } - fields += [self.parse_field(token::COLON)]; + fields += [self.parse_field(token::COLON)]/~; } hi = self.span.hi; self.expect(token::RBRACE); @@ -997,7 +998,7 @@ class parser { self.expect(token::LT); self.parse_seq_to_gt(some(token::COMMA), {|p| p.parse_ty(false)}) - } else { [] }; + } else { []/~ }; e = self.mk_pexpr(lo, hi, expr_field(self.to_expr(e), self.get_str(i), tys)); @@ -1027,13 +1028,13 @@ class parser { let blk = self.parse_fn_block_expr(); alt e.node { expr_call(f, args, false) { - e = pexpr(@{node: expr_call(f, args + [blk], true) + e = pexpr(@{node: expr_call(f, args + [blk]/~, true) with *self.to_expr(e)}); } _ { e = self.mk_pexpr(lo, self.last_span.hi, expr_call(self.to_expr(e), - [blk], true)); + [blk]/~, true)); } } } @@ -1085,10 +1086,10 @@ class parser { ret alt self.token { token::LPAREN | token::LBRACE | token::LBRACKET { let ket = flip(self.token); - tt_delim([parse_tt_flat(self, true)] + + tt_delim([parse_tt_flat(self, true)]/~ + self.parse_seq_to_before_end(ket, seq_sep_none(), {|p| p.parse_token_tree()}) - + [parse_tt_flat(self, true)]) + + [parse_tt_flat(self, true)]/~) } _ { parse_tt_flat(self, false) } }; @@ -1354,7 +1355,7 @@ class parser { let b_arg = vec::last(args); let last = self.mk_expr(b_arg.span.lo, b_arg.span.hi, ctor(b_arg)); - @{node: expr_call(f, vec::init(args) + [last], true) + @{node: expr_call(f, vec::init(args) + [last]/~, true) with *call} } _ { @@ -1385,14 +1386,14 @@ class parser { else { alt_exhaustive }; let discriminant = self.parse_expr(); self.expect(token::LBRACE); - let mut arms: [arm] = []; + let mut arms: [arm]/~ = []/~; while self.token != token::RBRACE { let pats = self.parse_pats(); let mut guard = none; if self.eat_keyword("if") { guard = some(self.parse_expr()); } if self.token == token::FAT_ARROW { self.bump(); } let blk = self.parse_block(); - arms += [{pats: pats, guard: guard, body: blk}]; + arms += [{pats: pats, guard: guard, body: blk}]/~; } let mut hi = self.span.hi; self.bump(); @@ -1434,10 +1435,10 @@ class parser { } } - fn parse_pats() -> [@pat] { - let mut pats = []; + fn parse_pats() -> [@pat]/~ { + let mut pats = []/~; loop { - pats += [self.parse_pat()]; + pats += [self.parse_pat()]/~; if self.token == token::BINOP(token::OR) { self.bump(); } else { ret pats; } }; @@ -1463,7 +1464,7 @@ class parser { } token::LBRACE { self.bump(); - let mut fields = []; + let mut fields = []/~; let mut etc = false; let mut first = true; while self.token != token::RBRACE { @@ -1498,7 +1499,7 @@ class parser { node: pat_ident(fieldpath, none), span: mk_sp(lo, hi)}; } - fields += [{ident: fieldname, pat: subpat}]; + fields += [{ident: fieldname, pat: subpat}]/~; } hi = self.span.hi; self.bump(); @@ -1513,10 +1514,10 @@ class parser { let expr = self.mk_expr(lo, hi, expr_lit(lit)); pat = pat_lit(expr); } else { - let mut fields = [self.parse_pat()]; + let mut fields = [self.parse_pat()]/~; while self.token == token::COMMA { self.bump(); - fields += [self.parse_pat()]; + fields += [self.parse_pat()]/~; } if 
vec::len(fields) == 1u { self.expect(token::COMMA); } hi = self.span.hi; @@ -1548,7 +1549,7 @@ class parser { } else { let enum_path = self.parse_path_with_tps(true); hi = enum_path.span.hi; - let mut args: [@pat] = []; + let mut args: [@pat]/~ = []/~; let mut star_pat = false; alt self.token { token::LPAREN { @@ -1604,9 +1605,9 @@ class parser { fn parse_let() -> @decl { let is_mutbl = self.eat_keyword("mut"); let lo = self.span.lo; - let mut locals = [self.parse_local(is_mutbl, true)]; + let mut locals = [self.parse_local(is_mutbl, true)]/~; while self.eat(token::COMMA) { - locals += [self.parse_local(is_mutbl, true)]; + locals += [self.parse_local(is_mutbl, true)]/~; } ret @spanned(lo, self.last_span.hi, decl_local(locals)); } @@ -1628,8 +1629,8 @@ class parser { span: mk_sp(lo, self.last_span.hi)}; } - fn parse_stmt(+first_item_attrs: [attribute]) -> @stmt { - fn check_expected_item(p: parser, current_attrs: [attribute]) { + fn parse_stmt(+first_item_attrs: [attribute]/~) -> @stmt { + fn check_expected_item(p: parser, current_attrs: [attribute]/~) { // If we have attributes then we should have an item if vec::is_not_empty(current_attrs) { p.fatal("expected item"); @@ -1645,7 +1646,7 @@ class parser { } else { let mut item_attrs; alt self.parse_outer_attrs_or_ext(first_item_attrs) { - none { item_attrs = []; } + none { item_attrs = []/~; } some(left(attrs)) { item_attrs = attrs; } some(right(ext)) { ret @spanned(lo, ext.span.hi, stmt_expr(ext, self.get_id())); @@ -1685,14 +1686,15 @@ class parser { ret blk; } - fn parse_inner_attrs_and_block(parse_attrs: bool) -> ([attribute], blk) { + fn parse_inner_attrs_and_block(parse_attrs: bool) + -> ([attribute]/~, blk) { fn maybe_parse_inner_attrs_and_next(p: parser, parse_attrs: bool) -> - {inner: [attribute], next: [attribute]} { + {inner: [attribute]/~, next: [attribute]/~} { if parse_attrs { p.parse_inner_attrs_and_next() } else { - {inner: [], next: []} + {inner: []/~, next: []/~} } } @@ -1727,12 +1729,12 @@ class parser { // necessary, and this should take a qualifier. // some blocks start with "#{"... 
fn parse_block_tail(lo: uint, s: blk_check_mode) -> blk { - self.parse_block_tail_(lo, s, []) + self.parse_block_tail_(lo, s, []/~) } fn parse_block_tail_(lo: uint, s: blk_check_mode, - +first_item_attrs: [attribute]) -> blk { - let mut stmts = []; + +first_item_attrs: [attribute]/~) -> blk { + let mut stmts = []/~; let mut expr = none; let {attrs_remaining, view_items} = self.parse_view(first_item_attrs, true); @@ -1749,13 +1751,14 @@ class parser { } _ { let stmt = self.parse_stmt(initial_attrs); - initial_attrs = []; + initial_attrs = []/~; alt stmt.node { stmt_expr(e, stmt_id) { // Expression without semicolon: alt self.token { token::SEMI { self.bump(); - stmts += [@{node: stmt_semi(e, stmt_id) with *stmt}]; + push(stmts, + @{node: stmt_semi(e, stmt_id) with *stmt}); } token::RBRACE { expr = some(e); @@ -1766,13 +1769,13 @@ class parser { but found '" + token_to_str(self.reader, t) + "'"); } - stmts += [stmt]; + stmts += [stmt]/~; } } } _ { // All other kinds of statements: - stmts += [stmt]; + stmts += [stmt]/~; if classify::stmt_ends_with_semi(*stmt) { self.expect(token::SEMI); @@ -1790,30 +1793,32 @@ class parser { } fn parse_ty_param() -> ty_param { - let mut bounds = []; + let mut bounds = []/~; let ident = self.parse_ident(); if self.eat(token::COLON) { while self.token != token::COMMA && self.token != token::GT { - if self.eat_keyword("send") { bounds += [bound_send]; } - else if self.eat_keyword("copy") { bounds += [bound_copy]; } - else if self.eat_keyword("const") { bounds += [bound_const]; } - else { bounds += [bound_iface(self.parse_ty(false))]; } + if self.eat_keyword("send") { push(bounds, bound_send); } + else if self.eat_keyword("copy") { push(bounds, bound_copy) } + else if self.eat_keyword("const") { + push(bounds, bound_const) + } + else { push(bounds, bound_iface(self.parse_ty(false))); } } } ret {ident: ident, id: self.get_id(), bounds: @bounds}; } - fn parse_ty_params() -> [ty_param] { + fn parse_ty_params() -> [ty_param]/~ { if self.eat(token::LT) { self.parse_seq_to_gt(some(token::COMMA), {|p| p.parse_ty_param()}) - } else { [] } + } else { []/~ } } fn parse_fn_decl(purity: purity, parse_arg_fn: fn(parser) -> arg_or_capture_item) -> (fn_decl, capture_clause) { - let args_or_capture_items: [arg_or_capture_item] = + let args_or_capture_items: [arg_or_capture_item]/~ = self.parse_unspanned_seq( token::LPAREN, token::RPAREN, seq_sep_trailing_disallowed(token::COMMA), parse_arg_fn); @@ -1824,7 +1829,7 @@ class parser { // Use the args list to translate each bound variable // mentioned in a constraint to an arg index. // Seems weird to do this in the parser, but I'm not sure how else to. 
- let mut constrs = []; + let mut constrs = []/~; if self.token == token::COLON { self.bump(); constrs = self.parse_constrs({|p| p.parse_ty_constr(inputs) }); @@ -1840,7 +1845,7 @@ class parser { fn parse_fn_block_decl() -> (fn_decl, capture_clause) { let inputs_captures = { if self.eat(token::OROR) { - [] + []/~ } else { self.parse_unspanned_seq( token::BINOP(token::OR), token::BINOP(token::OR), @@ -1857,11 +1862,11 @@ class parser { output: output, purity: impure_fn, cf: return_val, - constraints: []}, + constraints: []/~}, @either::rights(inputs_captures)); } - fn parse_fn_header() -> {ident: ident, tps: [ty_param]} { + fn parse_fn_header() -> {ident: ident, tps: [ty_param]/~} { let id = self.parse_value_ident(); let ty_params = self.parse_ty_params(); ret {ident: id, tps: ty_params}; @@ -1869,7 +1874,7 @@ class parser { fn mk_item(lo: uint, hi: uint, +ident: ident, +node: item_, vis: visibility, - +attrs: [attribute]) -> @item { + +attrs: [attribute]/~) -> @item { ret @{ident: ident, attrs: attrs, id: self.get_id(), @@ -1922,9 +1927,9 @@ class parser { } // Parses three variants (with the region/type params always optional): - // impl /& of to_str for [T] { ... } - // impl name/& of to_str for [T] { ... } - // impl name/& for [T] { ... } + // impl /& of to_str for [T]/~ { ... } + // impl name/& of to_str for [T]/~ { ... } + // impl name/& for [T]/~ { ... } fn parse_item_impl() -> item_info { fn wrap_path(p: parser, pt: @path) -> @ty { @{id: p.get_id(), node: ty_path(pt, p.get_id()), span: pt.span} @@ -1936,7 +1941,7 @@ class parser { (none, self.parse_region_param(), self.parse_ty_params()) } else if self.is_keyword("of") { - (none, rp_none, []) + (none, rp_none, []/~) } else { let id = self.parse_ident(); let rp = self.parse_region_param(); @@ -1956,10 +1961,10 @@ class parser { }; self.expect_keyword("for"); let ty = self.parse_ty(false); - let mut meths = []; + let mut meths = []/~; self.expect(token::LBRACE); while !self.eat(token::RBRACE) { - meths += [self.parse_method(public)]; + meths += [self.parse_method(public)]/~; } (ident, item_impl(tps, rp, ifce, ty, meths), none) } @@ -1969,7 +1974,7 @@ class parser { // the return type of the ctor function. fn ident_to_path_tys(i: ident, rp: region_param, - typarams: [ty_param]) -> @path { + typarams: [ty_param]/~) -> @path { let s = self.last_span; // Hack. But then, this whole function is in service of a hack. 
@@ -1978,7 +1983,7 @@ class parser { rp_self { some(self.region_from_name(some(@"self"))) } }; - @{span: s, global: false, idents: [i], + @{span: s, global: false, idents: [i]/~, rp: a_r, types: vec::map(typarams, {|tp| @{id: self.get_id(), @@ -1992,7 +1997,7 @@ class parser { id: self.get_id()} } - fn parse_iface_ref_list() -> [@iface_ref] { + fn parse_iface_ref_list() -> [@iface_ref]/~ { self.parse_seq_to_before_end( token::LBRACE, seq_sep_trailing_disallowed(token::COMMA), {|p| p.parse_iface_ref()}) @@ -2003,11 +2008,11 @@ class parser { let rp = self.parse_region_param(); let ty_params = self.parse_ty_params(); let class_path = self.ident_to_path_tys(class_name, rp, ty_params); - let ifaces : [@iface_ref] = if self.eat(token::COLON) + let ifaces : [@iface_ref]/~ = if self.eat(token::COLON) { self.parse_iface_ref_list() } - else { [] }; + else { []/~ }; self.expect(token::LBRACE); - let mut ms: [@class_member] = []; + let mut ms: [@class_member]/~ = []/~; let ctor_id = self.get_id(); let mut the_ctor : option<(fn_decl, blk, codemap::span)> = none; let mut the_dtor : option<(blk, codemap::span)> = none; @@ -2092,16 +2097,16 @@ class parser { } else if self.eat_keyword("priv") { self.expect(token::LBRACE); - let mut results = []; + let mut results = []/~; while self.token != token::RBRACE { - results += [self.parse_single_class_item(private)]; + results += [self.parse_single_class_item(private)]/~; } self.bump(); ret members(results); } else { // Probably need to parse attrs - ret members([self.parse_single_class_item(public)]); + ret members([self.parse_single_class_item(public)]/~); } } @@ -2112,11 +2117,11 @@ class parser { } fn parse_mod_items(term: token::token, - +first_item_attrs: [attribute]) -> _mod { + +first_item_attrs: [attribute]/~) -> _mod { // Shouldn't be any view items since we've already parsed an item attr let {attrs_remaining, view_items} = self.parse_view(first_item_attrs, false); - let mut items: [@item] = []; + let mut items: [@item]/~ = []/~; let mut first = true; while self.token != term { let mut attrs = self.parse_outer_attributes(); @@ -2124,7 +2129,7 @@ class parser { #debug["parse_mod_items: parse_item(attrs=%?)", attrs]; let vis = self.parse_visibility(private); alt self.parse_item(attrs, vis) { - some(i) { items += [i]; } + some(i) { items += [i]/~; } _ { self.fatal("expected item but found '" + token_to_str(self.reader, self.token) + "'"); @@ -2160,7 +2165,7 @@ class parser { (id, item_mod(m), some(inner_attrs.inner)) } - fn parse_item_native_fn(+attrs: [attribute], + fn parse_item_native_fn(+attrs: [attribute]/~, purity: purity) -> @native_item { let lo = self.last_span.lo; let t = self.parse_fn_header(); @@ -2186,22 +2191,22 @@ class parser { else { self.unexpected(); } } - fn parse_native_item(+attrs: [attribute]) -> + fn parse_native_item(+attrs: [attribute]/~) -> @native_item { self.parse_item_native_fn(attrs, self.parse_fn_purity()) } - fn parse_native_mod_items(+first_item_attrs: [attribute]) -> + fn parse_native_mod_items(+first_item_attrs: [attribute]/~) -> native_mod { // Shouldn't be any view items since we've already parsed an item attr let {attrs_remaining, view_items} = self.parse_view(first_item_attrs, false); - let mut items: [@native_item] = []; + let mut items: [@native_item]/~ = []/~; let mut initial_attrs = attrs_remaining; while self.token != token::RBRACE { let attrs = initial_attrs + self.parse_outer_attributes(); - initial_attrs = []; - items += [self.parse_native_item(attrs)]; + initial_attrs = []/~; + items += 
[self.parse_native_item(attrs)]/~; } ret {view_items: view_items, items: items}; @@ -2246,7 +2251,7 @@ class parser { let id = self.parse_ident(); let rp = self.parse_region_param(); let ty_params = self.parse_ty_params(); - let mut variants: [variant] = []; + let mut variants: [variant]/~ = []/~; // Newtype syntax if self.token == token::EQ { self.check_restricted_keywords_(*id); @@ -2256,12 +2261,12 @@ class parser { let variant = spanned(ty.span.lo, ty.span.hi, {name: id, - attrs: [], - args: [{ty: ty, id: self.get_id()}], + attrs: []/~, + args: [{ty: ty, id: self.get_id()}]/~, id: self.get_id(), disr_expr: none, vis: public}); - ret (id, item_enum([variant], ty_params, rp), none); + ret (id, item_enum([variant]/~, ty_params, rp), none); } self.expect(token::LBRACE); @@ -2272,7 +2277,7 @@ class parser { let vlo = self.span.lo; let vis = self.parse_visibility(default_vis); let ident = self.parse_value_ident(); - let mut args = [], disr_expr = none; + let mut args = []/~, disr_expr = none; if self.token == token::LPAREN { all_nullary = false; let arg_tys = self.parse_unspanned_seq( @@ -2280,7 +2285,7 @@ class parser { seq_sep_trailing_disallowed(token::COMMA), {|p| p.parse_ty(false)}); for arg_tys.each {|ty| - args += [{ty: ty, id: self.get_id()}]; + args += [{ty: ty, id: self.get_id()}]/~; } } else if self.eat(token::EQ) { have_disr = true; @@ -2290,7 +2295,7 @@ class parser { let vr = {name: ident, attrs: variant_attrs, args: args, id: self.get_id(), disr_expr: disr_expr, vis: vis}; - variants += [spanned(vlo, self.last_span.hi, vr)]; + variants += [spanned(vlo, self.last_span.hi, vr)]/~; if !self.eat(token::COMMA) { break; } } @@ -2333,7 +2338,7 @@ class parser { } } - fn parse_item(+attrs: [attribute], vis: visibility) + fn parse_item(+attrs: [attribute]/~, vis: visibility) -> option<@item> { let lo = self.span.lo; let (ident, item_, extra_attrs) = if self.eat_keyword("const") { @@ -2384,20 +2389,20 @@ class parser { fn parse_view_path() -> @view_path { let lo = self.span.lo; let first_ident = self.parse_ident(); - let mut path = [first_ident]; + let mut path = [first_ident]/~; #debug("parsed view_path: %s", *first_ident); alt self.token { token::EQ { // x = foo::bar self.bump(); - path = [self.parse_ident()]; + path = [self.parse_ident()]/~; while self.token == token::MOD_SEP { self.bump(); let id = self.parse_ident(); - path += [id]; + path += [id]/~; } let path = @{span: mk_sp(lo, self.span.hi), global: false, - idents: path, rp: none, types: []}; + idents: path, rp: none, types: []/~}; ret @spanned(lo, self.span.hi, view_path_simple(first_ident, path, self.get_id())); } @@ -2411,7 +2416,7 @@ class parser { token::IDENT(i, _) { self.bump(); - path += [self.get_str(i)]; + path += [self.get_str(i)]/~; } // foo::bar::{a,b,c} @@ -2422,7 +2427,7 @@ class parser { {|p| p.parse_path_list_ident()}); let path = @{span: mk_sp(lo, self.span.hi), global: false, idents: path, - rp: none, types: []}; + rp: none, types: []/~}; ret @spanned(lo, self.span.hi, view_path_list(path, idents, self.get_id())); } @@ -2432,7 +2437,7 @@ class parser { self.bump(); let path = @{span: mk_sp(lo, self.span.hi), global: false, idents: path, - rp: none, types: []}; + rp: none, types: []/~}; ret @spanned(lo, self.span.hi, view_path_glob(path, self.get_id())); } @@ -2445,16 +2450,16 @@ class parser { } let last = path[vec::len(path) - 1u]; let path = @{span: mk_sp(lo, self.span.hi), global: false, - idents: path, rp: none, types: []}; + idents: path, rp: none, types: []/~}; ret @spanned(lo, self.span.hi, 
view_path_simple(last, path, self.get_id())); } - fn parse_view_paths() -> [@view_path] { - let mut vp = [self.parse_view_path()]; + fn parse_view_paths() -> [@view_path]/~ { + let mut vp = [self.parse_view_path()]/~; while self.token == token::COMMA { self.bump(); - vp += [self.parse_view_path()]; + vp += [self.parse_view_path()]/~; } ret vp; } @@ -2468,7 +2473,7 @@ class parser { || self.token_is_keyword("export", tok) } - fn parse_view_item(+attrs: [attribute]) -> @view_item { + fn parse_view_item(+attrs: [attribute]/~) -> @view_item { let lo = self.span.lo, vis = self.parse_visibility(private); let node = if self.eat_keyword("use") { self.parse_use() @@ -2482,14 +2487,14 @@ class parser { vis: vis, span: mk_sp(lo, self.last_span.hi)} } - fn parse_view(+first_item_attrs: [attribute], - only_imports: bool) -> {attrs_remaining: [attribute], - view_items: [@view_item]} { + fn parse_view(+first_item_attrs: [attribute]/~, + only_imports: bool) -> {attrs_remaining: [attribute]/~, + view_items: [@view_item]/~} { let mut attrs = first_item_attrs + self.parse_outer_attributes(); - let mut items = []; + let mut items = []/~; while if only_imports { self.is_keyword("import") } else { self.is_view_item() } { - items += [self.parse_view_item(attrs)]; + items += [self.parse_view_item(attrs)]/~; attrs = self.parse_outer_attributes(); } {attrs_remaining: attrs, view_items: items} @@ -2502,7 +2507,7 @@ class parser { let first_item_outer_attrs = crate_attrs.next; let m = self.parse_mod_items(token::EOF, first_item_outer_attrs); ret @spanned(lo, self.span.lo, - {directives: [], + {directives: []/~, module: m, attrs: crate_attrs.inner, config: self.cfg}); @@ -2523,7 +2528,7 @@ class parser { // // Each directive imperatively extends its environment with 0 or more // items. - fn parse_crate_directive(first_outer_attr: [attribute]) -> + fn parse_crate_directive(first_outer_attr: [attribute]/~) -> crate_directive { // Collect the next attributes @@ -2564,8 +2569,8 @@ class parser { } fn parse_crate_directives(term: token::token, - first_outer_attr: [attribute]) -> - [@crate_directive] { + first_outer_attr: [attribute]/~) -> + [@crate_directive]/~ { // This is pretty ugly. 
If we have an outer attribute then we can't // accept seeing the terminator next, so if we do see it then fail the @@ -2574,12 +2579,12 @@ class parser { self.expect_keyword("mod"); } - let mut cdirs: [@crate_directive] = []; + let mut cdirs: [@crate_directive]/~ = []/~; let mut first_outer_attr = first_outer_attr; while self.token != term { let cdir = @self.parse_crate_directive(first_outer_attr); - cdirs += [cdir]; - first_outer_attr = []; + cdirs += [cdir]/~; + first_outer_attr = []/~; } ret cdirs; } diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 9d6427912df..feffbd4020c 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -260,7 +260,7 @@ fn contextual_keyword_table() -> hashmap { "with", /* temp */ "sep", "many", "at_least_one", "parse" - ]; + ]/~; for keys.each {|word| words.insert(word, ()); } @@ -298,7 +298,7 @@ fn restricted_keyword_table() -> hashmap { "true", "trait", "type", "unchecked", "unsafe", "while" - ]; + ]/~; for keys.each {|word| words.insert(word, ()); } diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs index 329dff0c4c1..5f10fe0eb47 100644 --- a/src/libsyntax/print/pp.rs +++ b/src/libsyntax/print/pp.rs @@ -71,7 +71,7 @@ fn tok_str(++t: token) -> str { } } -fn buf_str(toks: [mut token], szs: [mut int], left: uint, right: uint, +fn buf_str(toks: [mut token]/~, szs: [mut int]/~, left: uint, right: uint, lim: uint) -> str { let n = vec::len(toks); assert (n == vec::len(szs)); @@ -100,9 +100,9 @@ fn mk_printer(out: io::writer, linewidth: uint) -> printer { // fall behind. let n: uint = 3u * linewidth; #debug("mk_printer %u", linewidth); - let token: [mut token] = vec::to_mut(vec::from_elem(n, EOF)); - let size: [mut int] = vec::to_mut(vec::from_elem(n, 0)); - let scan_stack: [mut uint] = vec::to_mut(vec::from_elem(n, 0u)); + let token: [mut token]/~ = vec::to_mut(vec::from_elem(n, EOF)); + let size: [mut int]/~ = vec::to_mut(vec::from_elem(n, 0)); + let scan_stack: [mut uint]/~ = vec::to_mut(vec::from_elem(n, 0u)); @{out: out, buf_len: n, mut margin: linewidth as int, @@ -206,8 +206,8 @@ type printer = @{ mut space: int, // number of spaces left on line mut left: uint, // index of left side of input stream mut right: uint, // index of right side of input stream - token: [mut token], // ring-buffr stream goes through - size: [mut int], // ring-buffer of calculated sizes + token: [mut token]/~, // ring-buffr stream goes through + size: [mut int]/~, // ring-buffer of calculated sizes mut left_total: int, // running size of stream "...left" mut right_total: int, // running size of stream "...right" // pseudo-stack, really a ring too. Holds the @@ -216,7 +216,7 @@ type printer = @{ // BEGIN (if there is any) on top of it. Stuff is flushed off the // bottom as it becomes irrelevant due to the primary ring-buffer // advancing. - mut scan_stack: [mut uint], + mut scan_stack: [mut uint]/~, mut scan_stack_empty: bool, // top==bottom disambiguator mut top: uint, // index of top of scan_stack mut bottom: uint, // index of bottom of scan_stack @@ -231,7 +231,7 @@ impl printer for printer { // be very careful with this! 
fn replace_last_token(t: token) { self.token[self.right] = t; } fn pretty_print(t: token) { - #debug("pp [%u,%u]", self.left, self.right); + #debug("pp [%u,%u]/~", self.left, self.right); alt t { EOF { if !self.scan_stack_empty { @@ -248,17 +248,17 @@ impl printer for printer { self.left = 0u; self.right = 0u; } else { self.advance_right(); } - #debug("pp BEGIN/buffer [%u,%u]", self.left, self.right); + #debug("pp BEGIN/buffer [%u,%u]/~", self.left, self.right); self.token[self.right] = t; self.size[self.right] = -self.right_total; self.scan_push(self.right); } END { if self.scan_stack_empty { - #debug("pp END/print [%u,%u]", self.left, self.right); + #debug("pp END/print [%u,%u]/~", self.left, self.right); self.print(t, 0); } else { - #debug("pp END/buffer [%u,%u]", self.left, self.right); + #debug("pp END/buffer [%u,%u]/~", self.left, self.right); self.advance_right(); self.token[self.right] = t; self.size[self.right] = -1; @@ -272,7 +272,7 @@ impl printer for printer { self.left = 0u; self.right = 0u; } else { self.advance_right(); } - #debug("pp BREAK/buffer [%u,%u]", self.left, self.right); + #debug("pp BREAK/buffer [%u,%u]/~", self.left, self.right); self.check_stack(0); self.scan_push(self.right); self.token[self.right] = t; @@ -281,10 +281,10 @@ impl printer for printer { } STRING(s, len) { if self.scan_stack_empty { - #debug("pp STRING/print [%u,%u]", self.left, self.right); + #debug("pp STRING/print [%u,%u]/~", self.left, self.right); self.print(t, len); } else { - #debug("pp STRING/buffer [%u,%u]", self.left, self.right); + #debug("pp STRING/buffer [%u,%u]/~", self.left, self.right); self.advance_right(); self.token[self.right] = t; self.size[self.right] = len; @@ -295,7 +295,7 @@ impl printer for printer { } } fn check_stream() { - #debug("check_stream [%u, %u] with left_total=%d, right_total=%d", + #debug("check_stream [%u, %u]/~ with left_total=%d, right_total=%d", self.left, self.right, self.left_total, self.right_total); if self.right_total - self.left_total > self.space { #debug("scan window is %d, longer than space on line (%d)", @@ -347,7 +347,7 @@ impl printer for printer { assert (self.right != self.left); } fn advance_left(++x: token, L: int) { - #debug("advnce_left [%u,%u], sizeof(%u)=%d", self.left, self.right, + #debug("advnce_left [%u,%u]/~, sizeof(%u)=%d", self.left, self.right, self.left, L); if L >= 0 { self.print(x, L); diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index b38f4c35d86..0d3855b4f93 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -26,8 +26,8 @@ fn no_ann() -> pp_ann { type ps = @{s: pp::printer, cm: option, - comments: option<[comments::cmnt]>, - literals: option<[comments::lit]>, + comments: option<[comments::cmnt]/~>, + literals: option<[comments::lit]/~>, mut cur_cmnt: uint, mut cur_lit: uint, boxes: dvec, @@ -46,8 +46,8 @@ fn end(s: ps) { fn rust_printer(writer: io::writer) -> ps { ret @{s: pp::mk_printer(writer, default_columns), cm: none::, - comments: none::<[comments::cmnt]>, - literals: none::<[comments::lit]>, + comments: none::<[comments::cmnt]/~>, + literals: none::<[comments::lit]/~>, mut cur_cmnt: 0u, mut cur_lit: 0u, boxes: dvec(), @@ -97,7 +97,7 @@ fn item_to_str(i: @ast::item) -> str { ret to_str(i, print_item); } fn attr_to_str(i: ast::attribute) -> str { ret to_str(i, print_attribute); } -fn typarams_to_str(tps: [ast::ty_param]) -> str { +fn typarams_to_str(tps: [ast::ty_param]/~) -> str { ret to_str(tps, print_type_params) } @@ -106,7 +106,7 @@ fn path_to_str(&&p: 
@ast::path) -> str { } fn fun_to_str(decl: ast::fn_decl, name: ast::ident, - params: [ast::ty_param]) -> str { + params: [ast::ty_param]/~) -> str { let buffer = io::mem_buffer(); let s = rust_printer(io::mem_buffer_writer(buffer)); print_fn(s, decl, name, params); @@ -119,15 +119,15 @@ fn fun_to_str(decl: ast::fn_decl, name: ast::ident, #[test] fn test_fun_to_str() { let decl: ast::fn_decl = { - inputs: [], + inputs: []/~, output: @{id: 0, node: ast::ty_nil, span: ast_util::dummy_sp()}, purity: ast::impure_fn, cf: ast::return_val, - constraints: [] + constraints: []/~ }; - assert fun_to_str(decl, "a", []) == "fn a()"; + assert fun_to_str(decl, "a", []/~) == "fn a()"; } fn block_to_str(blk: ast::blk) -> str { @@ -158,8 +158,8 @@ fn variant_to_str(var: ast::variant) -> str { fn test_variant_to_str() { let var = ast_util::respan(ast_util::dummy_sp(), { name: "principle_skinner", - attrs: [], - args: [], + attrs: []/~, + args: []/~, id: 0, disr_expr: none }); @@ -254,7 +254,7 @@ fn synth_comment(s: ps, text: str) { word(s.s, "*/"); } -fn commasep(s: ps, b: breaks, elts: [IN], op: fn(ps, IN)) { +fn commasep(s: ps, b: breaks, elts: [IN]/~, op: fn(ps, IN)) { box(s, 0u, b); let mut first = true; for elts.each {|elt| @@ -265,7 +265,7 @@ fn commasep(s: ps, b: breaks, elts: [IN], op: fn(ps, IN)) { } -fn commasep_cmnt(s: ps, b: breaks, elts: [IN], op: fn(ps, IN), +fn commasep_cmnt(s: ps, b: breaks, elts: [IN]/~, op: fn(ps, IN), get_span: fn(IN) -> codemap::span) { box(s, 0u, b); let len = vec::len::(elts); @@ -284,12 +284,12 @@ fn commasep_cmnt(s: ps, b: breaks, elts: [IN], op: fn(ps, IN), end(s); } -fn commasep_exprs(s: ps, b: breaks, exprs: [@ast::expr]) { +fn commasep_exprs(s: ps, b: breaks, exprs: [@ast::expr]/~) { fn expr_span(&&expr: @ast::expr) -> codemap::span { ret expr.span; } commasep_cmnt(s, b, exprs, print_expr, expr_span); } -fn print_mod(s: ps, _mod: ast::_mod, attrs: [ast::attribute]) { +fn print_mod(s: ps, _mod: ast::_mod, attrs: [ast::attribute]/~) { print_inner_attributes(s, attrs); for _mod.view_items.each {|vitem| print_view_item(s, vitem); @@ -297,7 +297,7 @@ fn print_mod(s: ps, _mod: ast::_mod, attrs: [ast::attribute]) { for _mod.items.each {|item| print_item(s, item); } } -fn print_native_mod(s: ps, nmod: ast::native_mod, attrs: [ast::attribute]) { +fn print_native_mod(s: ps, nmod: ast::native_mod, attrs: [ast::attribute]/~) { print_inner_attributes(s, attrs); for nmod.view_items.each {|vitem| print_view_item(s, vitem); @@ -504,7 +504,7 @@ fn print_item(s: ps, &&item: @ast::item) { hardbreak_if_not_bol(s); maybe_print_comment(s, ctor.span.lo); head(s, "new"); - print_fn_args_and_ret(s, ctor.node.dec, []); + print_fn_args_and_ret(s, ctor.node.dec, []/~); space(s.s); print_block(s, ctor.node.body); option::iter(m_dtor) {|dtor| @@ -626,7 +626,7 @@ fn print_method(s: ps, meth: @ast::method) { print_block_with_attrs(s, meth.body, meth.attrs); } -fn print_outer_attributes(s: ps, attrs: [ast::attribute]) { +fn print_outer_attributes(s: ps, attrs: [ast::attribute]/~) { let mut count = 0; for attrs.each {|attr| alt attr.node.style { @@ -637,7 +637,7 @@ fn print_outer_attributes(s: ps, attrs: [ast::attribute]) { if count > 0 { hardbreak_if_not_bol(s); } } -fn print_inner_attributes(s: ps, attrs: [ast::attribute]) { +fn print_inner_attributes(s: ps, attrs: [ast::attribute]/~) { let mut count = 0; for attrs.each {|attr| alt attr.node.style { @@ -685,7 +685,7 @@ fn print_block(s: ps, blk: ast::blk) { print_possibly_embedded_block(s, blk, block_normal, indent_unit); } -fn 
print_block_with_attrs(s: ps, blk: ast::blk, attrs: [ast::attribute]) { +fn print_block_with_attrs(s: ps, blk: ast::blk, attrs: [ast::attribute]/~) { print_possibly_embedded_block_(s, blk, block_normal, indent_unit, attrs); } @@ -694,11 +694,11 @@ enum embed_type { block_macro, block_block_fn, block_normal, } fn print_possibly_embedded_block(s: ps, blk: ast::blk, embedded: embed_type, indented: uint) { print_possibly_embedded_block_( - s, blk, embedded, indented, []); + s, blk, embedded, indented, []/~); } fn print_possibly_embedded_block_(s: ps, blk: ast::blk, embedded: embed_type, - indented: uint, attrs: [ast::attribute]) { + indented: uint, attrs: [ast::attribute]/~) { alt blk.node.rules { ast::unchecked_blk { word(s.s, "unchecked"); } ast::unsafe_blk { word(s.s, "unsafe"); } @@ -811,10 +811,10 @@ fn print_mac(s: ps, m: ast::mac) { fn print_vstore(s: ps, t: ast::vstore) { alt t { - ast::vstore_fixed(some(i)) { word_space(s, #fmt("/%u", i)); } - ast::vstore_fixed(none) { word_space(s, "/_"); } - ast::vstore_uniq { word_space(s, "/~"); } - ast::vstore_box { word_space(s, "/@"); } + ast::vstore_fixed(some(i)) { word(s.s, #fmt("/%u", i)); } + ast::vstore_fixed(none) { word(s.s, "/_"); } + ast::vstore_uniq { word(s.s, "/~"); } + ast::vstore_box { word(s.s, "/@"); } ast::vstore_slice(r) { word(s.s, "/"); print_region(s, r); } } } @@ -1259,18 +1259,18 @@ fn print_pat(s: ps, &&pat: @ast::pat) { } fn print_fn(s: ps, decl: ast::fn_decl, name: ast::ident, - typarams: [ast::ty_param]) { + typarams: [ast::ty_param]/~) { alt decl.purity { ast::impure_fn { head(s, "fn") } _ { head(s, purity_to_str(decl.purity) + " fn") } } word(s.s, *name); print_type_params(s, typarams); - print_fn_args_and_ret(s, decl, []); + print_fn_args_and_ret(s, decl, []/~); } fn print_fn_args(s: ps, decl: ast::fn_decl, - cap_items: [ast::capture_item]) { + cap_items: [ast::capture_item]/~) { commasep(s, inconsistent, decl.inputs, print_arg); if cap_items.is_not_empty() { let mut first = decl.inputs.is_empty(); @@ -1284,7 +1284,7 @@ fn print_fn_args(s: ps, decl: ast::fn_decl, } fn print_fn_args_and_ret(s: ps, decl: ast::fn_decl, - cap_items: [ast::capture_item]) { + cap_items: [ast::capture_item]/~) { popen(s); print_fn_args(s, decl, cap_items); pclose(s); @@ -1301,7 +1301,7 @@ fn print_fn_args_and_ret(s: ps, decl: ast::fn_decl, } fn print_fn_block_args(s: ps, decl: ast::fn_decl, - cap_items: [ast::capture_item]) { + cap_items: [ast::capture_item]/~) { word(s.s, "|"); print_fn_args(s, decl, cap_items); word(s.s, "|"); @@ -1329,7 +1329,7 @@ fn print_arg_mode(s: ps, m: ast::mode) { if ms != "" { word(s.s, ms); } } -fn print_bounds(s: ps, bounds: @[ast::ty_param_bound]) { +fn print_bounds(s: ps, bounds: @[ast::ty_param_bound]/~) { if vec::len(*bounds) > 0u { word(s.s, ":"); for vec::each(*bounds) {|bound| @@ -1351,7 +1351,7 @@ fn print_region_param(s: ps, rp: ast::region_param) { } } -fn print_type_params(s: ps, &&params: [ast::ty_param]) { +fn print_type_params(s: ps, &&params: [ast::ty_param]/~) { if vec::len(params) > 0u { word(s.s, "<"); fn printParam(s: ps, param: ast::ty_param) { @@ -1408,7 +1408,7 @@ fn print_view_path(s: ps, &&vp: @ast::view_path) { } } -fn print_view_paths(s: ps, vps: [@ast::view_path]) { +fn print_view_paths(s: ps, vps: [@ast::view_path]/~) { commasep(s, inconsistent, vps, print_view_path); } @@ -1480,7 +1480,7 @@ fn print_arg(s: ps, input: ast::arg) { fn print_ty_fn(s: ps, opt_proto: option, decl: ast::fn_decl, id: option, - tps: option<[ast::ty_param]>) { + tps: option<[ast::ty_param]/~>) { ibox(s, 
indent_unit); word(s.s, opt_proto_to_str(opt_proto)); alt id { some(id) { word(s.s, " "); word(s.s, *id); } _ { } } @@ -1682,7 +1682,8 @@ fn next_comment(s: ps) -> option { } } -fn constr_args_to_str(f: fn@(T) -> str, args: [@ast::sp_constr_arg]) -> +fn constr_args_to_str(f: fn@(T) -> str, + args: [@ast::sp_constr_arg]/~) -> str { let mut comma = false; let mut s = "("; @@ -1727,7 +1728,7 @@ fn ty_constr_to_str(&&c: @ast::ty_constr) -> str { c.node.args); } -fn constrs_str(constrs: [T], elt: fn(T) -> str) -> str { +fn constrs_str(constrs: [T]/~, elt: fn(T) -> str) -> str { let mut s = "", colon = true; for constrs.each {|c| if colon { s += " : "; colon = false; } else { s += ", "; } diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs index 714097d356d..6a07d7f62ca 100644 --- a/src/libsyntax/visit.rs +++ b/src/libsyntax/visit.rs @@ -13,13 +13,13 @@ import codemap::span; enum vt { mk_vt(visitor), } enum fn_kind { - fk_item_fn(ident, [ty_param]), //< an item declared with fn() - fk_method(ident, [ty_param], @method), + fk_item_fn(ident, [ty_param]/~), //< an item declared with fn() + fk_method(ident, [ty_param]/~, @method), fk_anon(proto, capture_clause), //< an anonymous function like fn@(...) fk_fn_block(capture_clause), //< a block {||...} - fk_ctor(ident, [ty_param], node_id /* self id */, + fk_ctor(ident, [ty_param]/~, node_id /* self id */, def_id /* parent class id */), // class constructor - fk_dtor([ty_param], node_id /* self id */, + fk_dtor([ty_param]/~, node_id /* self id */, def_id /* parent class id */) // class destructor } @@ -33,13 +33,13 @@ fn name_of_fn(fk: fn_kind) -> ident { } } -fn tps_of_fn(fk: fn_kind) -> [ty_param] { +fn tps_of_fn(fk: fn_kind) -> [ty_param]/~ { alt fk { fk_item_fn(_, tps) | fk_method(_, tps, _) | fk_ctor(_, tps, _, _) | fk_dtor(tps, _, _) { /* FIXME (#2543) */ copy tps } - fk_anon(*) | fk_fn_block(*) { [] } + fk_anon(*) | fk_fn_block(*) { []/~ } } } @@ -58,7 +58,7 @@ type visitor = visit_decl: fn@(@decl, E, vt), visit_expr: fn@(@expr, E, vt), visit_ty: fn@(@ty, E, vt), - visit_ty_params: fn@([ty_param], E, vt), + visit_ty_params: fn@([ty_param]/~, E, vt), visit_constr: fn@(@path, span, node_id, E, vt), visit_fn: fn@(fn_kind, fn_decl, blk, span, node_id, E, vt), visit_class_item: fn@(@class_member, E, vt)}; @@ -256,7 +256,7 @@ fn visit_native_item(ni: @native_item, e: E, v: vt) { } } -fn visit_ty_params(tps: [ty_param], e: E, v: vt) { +fn visit_ty_params(tps: [ty_param]/~, e: E, v: vt) { for tps.each {|tp| for vec::each(*tp.bounds) {|bound| alt bound { @@ -286,7 +286,7 @@ fn visit_method_helper(m: @method, e: E, v: vt) { } // Similar logic to the comment on visit_method_helper - Tim -fn visit_class_ctor_helper(ctor: class_ctor, nm: ident, tps: [ty_param], +fn visit_class_ctor_helper(ctor: class_ctor, nm: ident, tps: [ty_param]/~, parent_id: def_id, e: E, v: vt) { v.visit_fn(fk_ctor(/* FIXME (#2543) */ copy nm, /* FIXME (#2543) */ copy tps, @@ -295,7 +295,7 @@ fn visit_class_ctor_helper(ctor: class_ctor, nm: ident, tps: [ty_param], } -fn visit_class_dtor_helper(dtor: class_dtor, tps: [ty_param], +fn visit_class_dtor_helper(dtor: class_dtor, tps: [ty_param]/~, parent_id: def_id, e: E, v: vt) { v.visit_fn(fk_dtor(/* FIXME (#2543) */ copy tps, dtor.node.self_id, parent_id), ast_util::dtor_dec(), @@ -337,7 +337,7 @@ fn visit_expr_opt(eo: option<@expr>, e: E, v: vt) { alt eo { none { } some(ex) { v.visit_expr(ex, e, v); } } } -fn visit_exprs(exprs: [@expr], e: E, v: vt) { +fn visit_exprs(exprs: [@expr]/~, e: E, v: vt) { for exprs.each {|ex| 
v.visit_expr(ex, e, v); } } @@ -454,7 +454,7 @@ type simple_visitor = visit_decl: fn@(@decl), visit_expr: fn@(@expr), visit_ty: fn@(@ty), - visit_ty_params: fn@([ty_param]), + visit_ty_params: fn@([ty_param]/~), visit_constr: fn@(@path, span, node_id), visit_fn: fn@(fn_kind, fn_decl, blk, span, node_id), visit_class_item: fn@(@class_member)}; @@ -474,7 +474,7 @@ fn default_simple_visitor() -> simple_visitor { visit_decl: fn@(_d: @decl) { }, visit_expr: fn@(_e: @expr) { }, visit_ty: simple_ignore_ty, - visit_ty_params: fn@(_ps: [ty_param]) {}, + visit_ty_params: fn@(_ps: [ty_param]/~) {}, visit_constr: fn@(_p: @path, _sp: span, _id: node_id) { }, visit_fn: fn@(_fk: fn_kind, _d: fn_decl, _b: blk, _sp: span, _id: node_id) { }, @@ -533,7 +533,9 @@ fn mk_simple_visitor(v: simple_visitor) -> vt<()> { f(ty); visit_ty(ty, e, v); } - fn v_ty_params(f: fn@([ty_param]), ps: [ty_param], &&e: (), v: vt<()>) { + fn v_ty_params(f: fn@([ty_param]/~), + ps: [ty_param]/~, + &&e: (), v: vt<()>) { f(ps); visit_ty_params(ps, e, v); } diff --git a/src/rustc/back/link.rs b/src/rustc/back/link.rs index 4875fbd69ca..44c801fb6a5 100644 --- a/src/rustc/back/link.rs +++ b/src/rustc/back/link.rs @@ -292,27 +292,27 @@ fn build_link_meta(sess: session, c: ast::crate, output: str, type provided_metas = {name: option<@str>, vers: option<@str>, - cmh_items: [@ast::meta_item]}; + cmh_items: [@ast::meta_item]/~}; fn provided_link_metas(sess: session, c: ast::crate) -> provided_metas { let mut name: option<@str> = none; let mut vers: option<@str> = none; - let mut cmh_items: [@ast::meta_item] = []; + let mut cmh_items: [@ast::meta_item]/~ = []/~; let linkage_metas = attr::find_linkage_metas(c.node.attrs); attr::require_unique_names(sess.diagnostic(), linkage_metas); for linkage_metas.each {|meta| if *attr::get_meta_item_name(meta) == "name" { alt attr::get_meta_item_value_str(meta) { some(v) { name = some(v); } - none { cmh_items += [meta]; } + none { cmh_items += [meta]/~; } } } else if *attr::get_meta_item_name(meta) == "vers" { alt attr::get_meta_item_value_str(meta) { some(v) { vers = some(v); } - none { cmh_items += [meta]; } + none { cmh_items += [meta]/~; } } - } else { cmh_items += [meta]; } + } else { cmh_items += [meta]/~; } } ret {name: name, vers: vers, cmh_items: cmh_items}; } @@ -320,7 +320,7 @@ fn build_link_meta(sess: session, c: ast::crate, output: str, // This calculates CMH as defined above fn crate_meta_extras_hash(sha: sha1, _crate: ast::crate, metas: provided_metas, - dep_hashes: [@str]) -> str { + dep_hashes: [@str]/~) -> str { fn len_and_str(s: str) -> str { ret #fmt["%u_%s", str::len(s), s]; } @@ -490,7 +490,7 @@ fn mangle(ss: path) -> str { } fn exported_name(path: path, hash: @str, vers: @str) -> str { - ret mangle(path + [path_name(hash)] + [path_name(vers)]); + ret mangle(path + [path_name(hash)]/~ + [path_name(vers)]/~); } fn mangle_exported_name(ccx: @crate_ctxt, path: path, t: ty::t) -> str { @@ -503,12 +503,12 @@ fn mangle_internal_name_by_type_only(ccx: @crate_ctxt, str { let s = @util::ppaux::ty_to_short_str(ccx.tcx, t); let hash = get_symbol_hash(ccx, t); - ret mangle([path_name(name), path_name(s), path_name(@hash)]); + ret mangle([path_name(name), path_name(s), path_name(@hash)]/~); } fn mangle_internal_name_by_path_and_seq(ccx: @crate_ctxt, path: path, flav: @str) -> str { - ret mangle(path + [path_name(@ccx.names(*flav))]); + ret mangle(path + [path_name(@ccx.names(*flav))]/~); } fn mangle_internal_name_by_path(_ccx: @crate_ctxt, path: path) -> str { @@ -577,8 +577,8 @@ fn 
link_binary(sess: session, // The invocations of cc share some flags across platforms let mut cc_args = - [stage] + sess.targ_cfg.target_strs.cc_args + - ["-o", output, obj_filename]; + [stage]/~ + sess.targ_cfg.target_strs.cc_args + + ["-o", output, obj_filename]/~; let mut lib_cmd; let os = sess.targ_cfg.os; @@ -591,18 +591,18 @@ fn link_binary(sess: session, let cstore = sess.cstore; for cstore::get_used_crate_files(cstore).each {|cratepath| if str::ends_with(cratepath, ".rlib") { - cc_args += [cratepath]; + cc_args += [cratepath]/~; cont; } let cratepath = cratepath; let dir = path::dirname(cratepath); - if dir != "" { cc_args += ["-L" + dir]; } + if dir != "" { cc_args += ["-L" + dir]/~; } let libarg = unlib(sess.targ_cfg, path::basename(cratepath)); - cc_args += ["-l" + libarg]; + cc_args += ["-l" + libarg]/~; } let ula = cstore::get_used_link_args(cstore); - for ula.each {|arg| cc_args += [arg]; } + for ula.each {|arg| cc_args += [arg]/~; } // # Native library linking @@ -613,37 +613,37 @@ fn link_binary(sess: session, // forces to make sure that library can be found at runtime. let addl_paths = sess.opts.addl_lib_search_paths; - for addl_paths.each {|path| cc_args += ["-L" + path]; } + for addl_paths.each {|path| cc_args += ["-L" + path]/~; } // The names of the native libraries let used_libs = cstore::get_used_libraries(cstore); - for used_libs.each {|l| cc_args += ["-l" + l]; } + for used_libs.each {|l| cc_args += ["-l" + l]/~; } if sess.building_library { - cc_args += [lib_cmd]; + cc_args += [lib_cmd]/~; // On mac we need to tell the linker to let this library // be rpathed if sess.targ_cfg.os == session::os_macos { cc_args += ["-Wl,-install_name,@rpath/" - + path::basename(output)]; + + path::basename(output)]/~; } } if !sess.debugging_opt(session::no_rt) { // Always want the runtime linked in - cc_args += ["-lrustrt"]; + cc_args += ["-lrustrt"]/~; } // On linux librt and libdl are an indirect dependencies via rustrt, // and binutils 2.22+ won't add them automatically if sess.targ_cfg.os == session::os_linux { - cc_args += ["-lrt", "-ldl"]; + cc_args += ["-lrt", "-ldl"]/~; // LLVM implements the `frem` instruction as a call to `fmod`, // which lives in libm. Similar to above, on some linuxes we // have to be explicit about linking to it. See #2510 - cc_args += ["-lm"]; + cc_args += ["-lm"]/~; } if sess.targ_cfg.os == session::os_freebsd { @@ -653,7 +653,7 @@ fn link_binary(sess: session, "-L/usr/local/lib/gcc44", "-lstdc++", "-Wl,-z,origin", "-Wl,-rpath,/usr/local/lib/gcc46", - "-Wl,-rpath,/usr/local/lib/gcc44"]; + "-Wl,-rpath,/usr/local/lib/gcc44"]/~; } // OS X 10.6 introduced 'compact unwind info', which is produced by the @@ -661,11 +661,11 @@ fn link_binary(sess: session, // understand how to unwind our __morestack frame, so we have to turn it // off. This has impacted some other projects like GHC. 
if sess.targ_cfg.os == session::os_macos { - cc_args += ["-Wl,-no_compact_unwind"]; + cc_args += ["-Wl,-no_compact_unwind"]/~; } // Stack growth requires statically linking a __morestack function - cc_args += ["-lmorestack"]; + cc_args += ["-lmorestack"]/~; // FIXME (#2397): At some point we want to rpath our guesses as to where // native libraries might live, based on the addl_lib_search_paths @@ -685,7 +685,7 @@ fn link_binary(sess: session, // Clean up on Darwin if sess.targ_cfg.os == session::os_macos { - run::run_program("dsymutil", [output]); + run::run_program("dsymutil", [output]/~); } // Remove the temporary object file if we aren't saving temps diff --git a/src/rustc/back/rpath.rs b/src/rustc/back/rpath.rs index e37d4270215..dbe70a8085f 100644 --- a/src/rustc/back/rpath.rs +++ b/src/rustc/back/rpath.rs @@ -13,12 +13,12 @@ pure fn not_win32(os: session::os) -> bool { } } -fn get_rpath_flags(sess: session::session, out_filename: str) -> [str] { +fn get_rpath_flags(sess: session::session, out_filename: str) -> [str]/~ { let os = sess.targ_cfg.os; // No rpath on windows if os == session::os_win32 { - ret []; + ret []/~; } #debug("preparing the RPATH!"); @@ -29,7 +29,7 @@ fn get_rpath_flags(sess: session::session, out_filename: str) -> [str] { let libs = cstore::get_used_crate_files(sess.cstore); // We don't currently rpath native libraries, but we know // where rustrt is and we know every rust program needs it - let libs = libs + [get_sysroot_absolute_rt_lib(sess)]; + let libs = libs + [get_sysroot_absolute_rt_lib(sess)]/~; let target_triple = sess.opts.target_triple; let rpaths = get_rpaths(os, cwd, sysroot, output, libs, target_triple); @@ -37,20 +37,20 @@ fn get_rpath_flags(sess: session::session, out_filename: str) -> [str] { } fn get_sysroot_absolute_rt_lib(sess: session::session) -> path::path { - let path = [sess.filesearch.sysroot()] + let path = [sess.filesearch.sysroot()]/~ + filesearch::relative_target_lib_path( sess.opts.target_triple) - + [os::dll_filename("rustrt")]; + + [os::dll_filename("rustrt")]/~; path::connect_many(path) } -fn rpaths_to_flags(rpaths: [str]) -> [str] { +fn rpaths_to_flags(rpaths: [str]/~) -> [str]/~ { vec::map(rpaths, { |rpath| #fmt("-Wl,-rpath,%s",rpath)}) } fn get_rpaths(os: session::os, cwd: path::path, sysroot: path::path, - output: path::path, libs: [path::path], - target_triple: str) -> [str] { + output: path::path, libs: [path::path]/~, + target_triple: str) -> [str]/~ { #debug("cwd: %s", cwd); #debug("sysroot: %s", sysroot); #debug("output: %s", output); @@ -70,9 +70,9 @@ fn get_rpaths(os: session::os, cwd: path::path, sysroot: path::path, let abs_rpaths = get_absolute_rpaths(cwd, libs); // And a final backup rpath to the global library location. 
- let fallback_rpaths = [get_install_prefix_rpath(cwd, target_triple)]; + let fallback_rpaths = [get_install_prefix_rpath(cwd, target_triple)]/~; - fn log_rpaths(desc: str, rpaths: [str]) { + fn log_rpaths(desc: str, rpaths: [str]/~) { #debug("%s rpaths:", desc); for rpaths.each {|rpath| #debug(" %s", rpath); @@ -93,7 +93,7 @@ fn get_rpaths(os: session::os, cwd: path::path, sysroot: path::path, fn get_rpaths_relative_to_output(os: session::os, cwd: path::path, output: path::path, - libs: [path::path]) -> [str] { + libs: [path::path]/~) -> [str]/~ { vec::map(libs, {|a| check not_win32(os); get_rpath_relative_to_output(os, cwd, output, a) @@ -139,8 +139,8 @@ fn get_relative_to(abs1: path::path, abs2: path::path) -> path::path { start_idx += 1u; } - let mut path = []; - for uint::range(start_idx, len1 - 1u) {|_i| path += [".."]; }; + let mut path = []/~; + for uint::range(start_idx, len1 - 1u) {|_i| vec::push(path, ".."); }; path += vec::slice(split2, start_idx, len2 - 1u); @@ -151,7 +151,7 @@ fn get_relative_to(abs1: path::path, abs2: path::path) -> path::path { } } -fn get_absolute_rpaths(cwd: path::path, libs: [path::path]) -> [str] { +fn get_absolute_rpaths(cwd: path::path, libs: [path::path]/~) -> [str]/~ { vec::map(libs, {|a|get_absolute_rpath(cwd, a)}) } @@ -174,17 +174,17 @@ fn get_install_prefix_rpath(cwd: path::path, target_triple: str) -> str { fail "rustc compiled without CFG_PREFIX environment variable"; } - let path = [install_prefix] + let path = [install_prefix]/~ + filesearch::relative_target_lib_path(target_triple); get_absolute(cwd, path::connect_many(path)) } -fn minimize_rpaths(rpaths: [str]) -> [str] { +fn minimize_rpaths(rpaths: [str]/~) -> [str]/~ { let set = map::str_hash::<()>(); - let mut minimized = []; + let mut minimized = []/~; for rpaths.each {|rpath| if !set.contains_key(rpath) { - minimized += [rpath]; + minimized += [rpath]/~; set.insert(rpath, ()); } } @@ -195,8 +195,8 @@ fn minimize_rpaths(rpaths: [str]) -> [str] { mod test { #[test] fn test_rpaths_to_flags() { - let flags = rpaths_to_flags(["path1", "path2"]); - assert flags == ["-Wl,-rpath,path1", "-Wl,-rpath,path2"]; + let flags = rpaths_to_flags(["path1", "path2"]/~); + assert flags == ["-Wl,-rpath,path1", "-Wl,-rpath,path2"]/~; } #[test] @@ -230,15 +230,15 @@ mod test { #[test] fn test_minimize1() { - let res = minimize_rpaths(["rpath1", "rpath2", "rpath1"]); - assert res == ["rpath1", "rpath2"]; + let res = minimize_rpaths(["rpath1", "rpath2", "rpath1"]/~); + assert res == ["rpath1", "rpath2"]/~; } #[test] fn test_minimize2() { let res = minimize_rpaths(["1a", "2", "2", "1a", "4a", - "1a", "2", "3", "4a", "3"]); - assert res == ["1a", "2", "4a", "3"]; + "1a", "2", "3", "4a", "3"]/~); + assert res == ["1a", "2", "4a", "3"]/~; } #[test] diff --git a/src/rustc/back/target_strs.rs b/src/rustc/back/target_strs.rs index cc40746b71c..24665c2ab36 100644 --- a/src/rustc/back/target_strs.rs +++ b/src/rustc/back/target_strs.rs @@ -3,5 +3,5 @@ type t = { meta_sect_name: str, data_layout: str, target_triple: str, - cc_args: [str] + cc_args: [str]/~ }; diff --git a/src/rustc/back/upcall.rs b/src/rustc/back/upcall.rs index 2cae381431d..98dc3200b89 100644 --- a/src/rustc/back/upcall.rs +++ b/src/rustc/back/upcall.rs @@ -33,10 +33,10 @@ fn declare_upcalls(targ_cfg: @session::config, tydesc_type: TypeRef, llmod: ModuleRef) -> @upcalls { fn decl(llmod: ModuleRef, prefix: str, name: str, - tys: [TypeRef], rv: TypeRef) -> + tys: [TypeRef]/~, rv: TypeRef) -> ValueRef { - let mut arg_tys: [TypeRef] = []; - for tys.each 
{|t| arg_tys += [t]; } + let mut arg_tys: [TypeRef]/~ = []/~; + for tys.each {|t| arg_tys += [t]/~; } let fn_ty = T_fn(arg_tys, rv); ret base::decl_cdecl_fn(llmod, prefix + name, fn_ty); } @@ -51,61 +51,61 @@ fn declare_upcalls(targ_cfg: @session::config, ret @{_fail: dv("fail", [T_ptr(T_i8()), T_ptr(T_i8()), - size_t]), + size_t]/~), trace: dv("trace", [T_ptr(T_i8()), T_ptr(T_i8()), - int_t]), + int_t]/~), malloc: nothrow(d("malloc", - [T_ptr(tydesc_type), int_t], + [T_ptr(tydesc_type), int_t]/~, T_ptr(T_i8()))), free: - nothrow(dv("free", [T_ptr(T_i8())])), + nothrow(dv("free", [T_ptr(T_i8())]/~)), exchange_malloc: nothrow(d("exchange_malloc", - [T_ptr(tydesc_type), int_t], + [T_ptr(tydesc_type), int_t]/~, T_ptr(T_i8()))), exchange_free: - nothrow(dv("exchange_free", [T_ptr(T_i8())])), + nothrow(dv("exchange_free", [T_ptr(T_i8())]/~)), validate_box: - nothrow(dv("validate_box", [T_ptr(T_i8())])), + nothrow(dv("validate_box", [T_ptr(T_i8())]/~)), mark: - d("mark", [T_ptr(T_i8())], int_t), + d("mark", [T_ptr(T_i8())]/~, int_t), vec_grow: - nothrow(dv("vec_grow", [T_ptr(T_ptr(T_i8())), int_t])), + nothrow(dv("vec_grow", [T_ptr(T_ptr(T_i8())), int_t]/~)), str_new_uniq: - nothrow(d("str_new_uniq", [T_ptr(T_i8()), int_t], + nothrow(d("str_new_uniq", [T_ptr(T_i8()), int_t]/~, T_ptr(T_i8()))), str_new_shared: - nothrow(d("str_new_shared", [T_ptr(T_i8()), int_t], + nothrow(d("str_new_shared", [T_ptr(T_i8()), int_t]/~, T_ptr(T_i8()))), str_concat: nothrow(d("str_concat", [T_ptr(T_i8()), - T_ptr(T_i8())], + T_ptr(T_i8())]/~, T_ptr(T_i8()))), cmp_type: dv("cmp_type", [T_ptr(T_i1()), T_ptr(tydesc_type), T_ptr(T_ptr(tydesc_type)), T_ptr(T_i8()), T_ptr(T_i8()), - T_i8()]), + T_i8()]/~), log_type: dv("log_type", [T_ptr(tydesc_type), - T_ptr(T_i8()), T_i32()]), + T_ptr(T_i8()), T_i32()]/~), alloc_c_stack: - d("alloc_c_stack", [size_t], T_ptr(T_i8())), + d("alloc_c_stack", [size_t]/~, T_ptr(T_i8())), call_shim_on_c_stack: d("call_shim_on_c_stack", // arguments: void *args, void *fn_ptr - [T_ptr(T_i8()), T_ptr(T_i8())], + [T_ptr(T_i8()), T_ptr(T_i8())]/~, int_t), call_shim_on_rust_stack: d("call_shim_on_rust_stack", - [T_ptr(T_i8()), T_ptr(T_i8())], int_t), + [T_ptr(T_i8()), T_ptr(T_i8())]/~, int_t), rust_personality: - nothrow(d("rust_personality", [], T_i32())), + nothrow(d("rust_personality", []/~, T_i32())), reset_stack_limit: - nothrow(dv("reset_stack_limit", [])) + nothrow(dv("reset_stack_limit", []/~)) }; } // diff --git a/src/rustc/back/x86.rs b/src/rustc/back/x86.rs index a7febe457dd..df68aee37e5 100644 --- a/src/rustc/back/x86.rs +++ b/src/rustc/back/x86.rs @@ -35,7 +35,7 @@ fn get_target_strs(target_os: session::os) -> target_strs::t { session::os_freebsd { "i686-unknown-freebsd" } }, - cc_args: ["-m32"] + cc_args: ["-m32"]/~ }; } diff --git a/src/rustc/back/x86_64.rs b/src/rustc/back/x86_64.rs index 3210a2333f4..6936b3cec7d 100644 --- a/src/rustc/back/x86_64.rs +++ b/src/rustc/back/x86_64.rs @@ -42,7 +42,7 @@ fn get_target_strs(target_os: session::os) -> target_strs::t { session::os_freebsd { "x86_64-unknown-freebsd" } }, - cc_args: ["-m64"] + cc_args: ["-m64"]/~ }; } diff --git a/src/rustc/driver/driver.rs b/src/rustc/driver/driver.rs index 553db7695a8..13c0035011c 100644 --- a/src/rustc/driver/driver.rs +++ b/src/rustc/driver/driver.rs @@ -56,7 +56,7 @@ fn default_configuration(sess: session, argv0: str, input: input) -> mk(@"target_libc", libc), // Build bindings. 
mk(@"build_compiler", argv0), - mk(@"build_input", source_name(input))]; + mk(@"build_input", source_name(input))]/~; } fn build_configuration(sess: session, argv0: str, input: input) -> @@ -70,19 +70,19 @@ fn build_configuration(sess: session, argv0: str, input: input) -> { if sess.opts.test && !attr::contains_name(user_cfg, "test") { - [attr::mk_word_item(@"test")] - } else { [] } + [attr::mk_word_item(@"test")]/~ + } else { []/~ } }; ret user_cfg + gen_cfg + default_cfg; } // Convert strings provided as --cfg [cfgspec] into a crate_cfg -fn parse_cfgspecs(cfgspecs: [str]) -> ast::crate_cfg { +fn parse_cfgspecs(cfgspecs: [str]/~) -> ast::crate_cfg { // FIXME (#2399): It would be nice to use the parser to parse all // varieties of meta_item here. At the moment we just support the // meta_word variant. - let mut words = []; - for cfgspecs.each {|s| words += [attr::mk_word_item(@s)]; } + let mut words = []/~; + for cfgspecs.each {|s| vec::push(words, attr::mk_word_item(@s)); } ret words; } @@ -563,7 +563,7 @@ fn parse_pretty(sess: session, &&name: str) -> pp_mode { "`identified`"); } -fn opts() -> [getopts::opt] { +fn opts() -> [getopts::opt]/~ { ret [optflag("h"), optflag("help"), optflag("v"), optflag("version"), optflag("emit-llvm"), optflagopt("pretty"), optflag("ls"), optflag("parse-only"), optflag("no-trans"), @@ -577,7 +577,7 @@ fn opts() -> [getopts::opt] { optmulti("Z"), optmulti("cfg"), optflag("test"), - optflag("lib"), optflag("bin"), optflag("static"), optflag("gc")]; + optflag("lib"), optflag("bin"), optflag("static"), optflag("gc")]/~; } type output_filenames = @{out_filename: str, obj_filename:str}; @@ -692,7 +692,7 @@ mod test { #[test] fn test_switch_implies_cfg_test() { let match = - alt getopts::getopts(["--test"], opts()) { + alt getopts::getopts(["--test"]/~, opts()) { ok(m) { m } err(f) { fail "test_switch_implies_cfg_test: " + getopts::fail_str(f); } @@ -708,7 +708,7 @@ mod test { #[test] fn test_switch_implies_cfg_test_unless_cfg_test() { let match = - alt getopts::getopts(["--test", "--cfg=test"], opts()) { + alt getopts::getopts(["--test", "--cfg=test"]/~, opts()) { ok(m) { m } err(f) { fail "test_switch_implies_cfg_test_unless_cfg_test: " + getopts::fail_str(f); } diff --git a/src/rustc/driver/rustc.rs b/src/rustc/driver/rustc.rs index 634d5587f17..1d99f3a7012 100644 --- a/src/rustc/driver/rustc.rs +++ b/src/rustc/driver/rustc.rs @@ -109,7 +109,7 @@ fn describe_debug_flags() { } } -fn run_compiler(args: [str], demitter: diagnostic::emitter) { +fn run_compiler(args: [str]/~, demitter: diagnostic::emitter) { // Don't display log spew by default. Can override with RUST_LOG. 
logging::console_off(); @@ -250,7 +250,7 @@ fn monitor(+f: fn~(diagnostic::emitter)) { to get further details and report the results \ to github.com/mozilla/rust/issues" - ].each {|note| + ]/~.each {|note| diagnostic::emit(none, note, diagnostic::note) } } @@ -260,7 +260,7 @@ fn monitor(+f: fn~(diagnostic::emitter)) { } } -fn main(args: [str]) { +fn main(args: [str]/~) { monitor {|demitter| run_compiler(args, demitter); } diff --git a/src/rustc/driver/session.rs b/src/rustc/driver/session.rs index 9645d919b04..263343d63d7 100644 --- a/src/rustc/driver/session.rs +++ b/src/rustc/driver/session.rs @@ -36,7 +36,7 @@ const trace: uint = 128u; // It should be removed const no_rt: uint = 256u; -fn debugging_opts_map() -> [(str, str, uint)] { +fn debugging_opts_map() -> [(str, str, uint)]/~ { [("ppregions", "prettyprint regions with \ internal repr details", ppregions), ("time-passes", "measure time of each rustc pass", time_passes), @@ -48,7 +48,7 @@ fn debugging_opts_map() -> [(str, str, uint)] { ("no-verify", "skip LLVM verification", no_verify), ("trace", "emit trace logs", trace), ("no-rt", "do not link to the runtime", no_rt) - ] + ]/~ } type options = @@ -59,10 +59,10 @@ type options = optimize: uint, debuginfo: bool, extra_debuginfo: bool, - lint_opts: [(lint::lint, lint::level)], + lint_opts: [(lint::lint, lint::level)]/~, save_temps: bool, output_type: back::link::output_type, - addl_lib_search_paths: [str], + addl_lib_search_paths: [str]/~, maybe_sysroot: option, target_triple: str, cfg: ast::crate_cfg, @@ -72,7 +72,7 @@ type options = debugging_opts: uint, }; -type crate_metadata = {name: str, data: [u8]}; +type crate_metadata = {name: str, data: [u8]/~}; type session = @{targ_cfg: @config, opts: @options, @@ -172,13 +172,13 @@ fn basic_options() -> @options { optimize: 0u, debuginfo: false, extra_debuginfo: false, - lint_opts: [], + lint_opts: []/~, save_temps: false, output_type: link::output_type_exe, - addl_lib_search_paths: [], + addl_lib_search_paths: []/~, maybe_sysroot: none, target_triple: driver::host_triple(), - cfg: [], + cfg: []/~, test: false, parse_only: false, no_trans: false, @@ -238,14 +238,14 @@ mod test { } fn make_crate(with_bin: bool, with_lib: bool) -> @ast::crate { - let mut attrs = []; - if with_bin { attrs += [make_crate_type_attr("bin")]; } - if with_lib { attrs += [make_crate_type_attr("lib")]; } + let mut attrs = []/~; + if with_bin { attrs += [make_crate_type_attr("bin")]/~; } + if with_lib { attrs += [make_crate_type_attr("lib")]/~; } @ast_util::respan(ast_util::dummy_sp(), { - directives: [], - module: {view_items: [], items: []}, + directives: []/~, + module: {view_items: []/~, items: []/~}, attrs: attrs, - config: [] + config: []/~ }) } diff --git a/src/rustc/front/config.rs b/src/rustc/front/config.rs index 57c1f587f8c..079e8fb71cb 100644 --- a/src/rustc/front/config.rs +++ b/src/rustc/front/config.rs @@ -4,7 +4,7 @@ export strip_unconfigured_items; export metas_in_cfg; export strip_items; -type in_cfg_pred = fn@([ast::attribute]) -> bool; +type in_cfg_pred = fn@([ast::attribute]/~) -> bool; type ctxt = @{ in_cfg: in_cfg_pred @@ -100,11 +100,11 @@ fn native_item_in_cfg(cx: ctxt, item: @ast::native_item) -> bool { // Determine if an item should be translated in the current crate // configuration based on the item's attributes -fn in_cfg(cfg: ast::crate_cfg, attrs: [ast::attribute]) -> bool { +fn in_cfg(cfg: ast::crate_cfg, attrs: [ast::attribute]/~) -> bool { metas_in_cfg(cfg, attr::attr_metas(attrs)) } -fn metas_in_cfg(cfg: ast::crate_cfg, metas: 
[@ast::meta_item]) -> bool { +fn metas_in_cfg(cfg: ast::crate_cfg, metas: [@ast::meta_item]/~) -> bool { // The "cfg" attributes on the item let cfg_metas = attr::find_meta_items_by_name(metas, "cfg"); diff --git a/src/rustc/front/core_inject.rs b/src/rustc/front/core_inject.rs index bc7048f8f09..c15f08e561f 100644 --- a/src/rustc/front/core_inject.rs +++ b/src/rustc/front/core_inject.rs @@ -30,18 +30,18 @@ fn inject_libcore_ref(sess: session, let n1 = sess.next_node_id(); let n2 = sess.next_node_id(); - let vi1 = @{node: ast::view_item_use(@"core", [], n1), - attrs: [], + let vi1 = @{node: ast::view_item_use(@"core", []/~, n1), + attrs: []/~, vis: ast::public, span: dummy_sp()}; let vp = spanned(ast::view_path_glob(ident_to_path(dummy_sp(), @"core"), n2)); - let vi2 = @{node: ast::view_item_import([vp]), - attrs: [], + let vi2 = @{node: ast::view_item_import([vp]/~), + attrs: []/~, vis: ast::public, span: dummy_sp()}; - let vis = [vi1, vi2] + crate.node.module.view_items; + let vis = [vi1, vi2]/~ + crate.node.module.view_items; ret @{node: {module: { view_items: vis with crate.node.module } with crate.node} with *crate } diff --git a/src/rustc/front/intrinsic_inject.rs b/src/rustc/front/intrinsic_inject.rs index 8919265fb87..06f8bbe92c9 100644 --- a/src/rustc/front/intrinsic_inject.rs +++ b/src/rustc/front/intrinsic_inject.rs @@ -12,7 +12,7 @@ fn inject_intrinsic(sess: session, let item = parse::parse_item_from_source_str("", intrinsic_module, sess.opts.cfg, - [], ast::public, + []/~, ast::public, sess.parse_sess); let item = alt item { @@ -22,7 +22,7 @@ fn inject_intrinsic(sess: session, } }; - let items = [item] + crate.node.module.items; + let items = [item]/~ + crate.node.module.items; ret @{node: {module: { items: items with crate.node.module } with crate.node} with *crate } diff --git a/src/rustc/front/test.rs b/src/rustc/front/test.rs index 3f5fca12e2a..d9240dc8877 100644 --- a/src/rustc/front/test.rs +++ b/src/rustc/front/test.rs @@ -15,12 +15,13 @@ export modify_for_testing; type node_id_gen = fn@() -> ast::node_id; -type test = {span: span, path: [ast::ident], ignore: bool, should_fail: bool}; +type test = {span: span, path: [ast::ident]/~, + ignore: bool, should_fail: bool}; type test_ctxt = @{sess: session::session, crate: @ast::crate, - mut path: [ast::ident], + mut path: [ast::ident]/~, testfns: dvec}; // Traverse the crate, collecting all the test functions, eliding any @@ -40,7 +41,7 @@ fn generate_test_harness(sess: session::session, let cx: test_ctxt = @{sess: sess, crate: crate, - mut path: [], + mut path: []/~, testfns: dvec()}; let precursor = @@ -97,7 +98,7 @@ fn fold_crate(cx: test_ctxt, c: ast::crate_, fld: fold::ast_fold) -> fn fold_item(cx: test_ctxt, &&i: @ast::item, fld: fold::ast_fold) -> @ast::item { - cx.path += [i.ident]; + cx.path += [i.ident]/~; #debug("current path: %s", ast_util::path_name_i(cx.path)); if is_test_fn(i) { @@ -160,7 +161,7 @@ fn should_fail(i: @ast::item) -> bool { fn add_test_module(cx: test_ctxt, m: ast::_mod) -> ast::_mod { let testmod = mk_test_module(cx); - ret {items: m.items + [testmod] with m}; + ret {items: m.items + [testmod]/~ with m}; } /* @@ -169,11 +170,11 @@ We're going to be building a module that looks more or less like: mod __test { - fn main(args: [str]) -> int { + fn main(args: [str]/~) -> int { std::test::test_main(args, tests()) } - fn tests() -> [std::test::test_desc] { + fn tests() -> [std::test::test_desc]/~ { ... the list of tests in the crate ... 
} } @@ -187,14 +188,14 @@ fn mk_test_module(cx: test_ctxt) -> @ast::item { // The synthesized main function which will call the console test runner // with our list of tests let mainfn = mk_main(cx); - let testmod: ast::_mod = {view_items: [], items: [mainfn, testsfn]}; + let testmod: ast::_mod = {view_items: []/~, items: [mainfn, testsfn]/~}; let item_ = ast::item_mod(testmod); // This attribute tells resolve to let us call unexported functions let resolve_unexported_attr = attr::mk_attr(attr::mk_word_item(@"!resolve_unexported")); let item: ast::item = {ident: @"__test", - attrs: [resolve_unexported_attr], + attrs: [resolve_unexported_attr]/~, id: cx.sess.next_node_id(), node: item_, vis: ast::public, @@ -209,31 +210,31 @@ fn nospan(t: T) -> ast::spanned { ret {node: t, span: dummy_sp()}; } -fn path_node(ids: [ast::ident]) -> @ast::path { - @{span: dummy_sp(), global: false, idents: ids, rp: none, types: []} +fn path_node(ids: [ast::ident]/~) -> @ast::path { + @{span: dummy_sp(), global: false, idents: ids, rp: none, types: []/~} } fn mk_tests(cx: test_ctxt) -> @ast::item { let ret_ty = mk_test_desc_vec_ty(cx); let decl: ast::fn_decl = - {inputs: [], + {inputs: []/~, output: ret_ty, purity: ast::impure_fn, cf: ast::return_val, - constraints: []}; + constraints: []/~}; // The vector of test_descs for this crate let test_descs = mk_test_desc_vec(cx); let body_: ast::blk_ = - default_block([], option::some(test_descs), cx.sess.next_node_id()); + default_block([]/~, option::some(test_descs), cx.sess.next_node_id()); let body = nospan(body_); - let item_ = ast::item_fn(decl, [], body); + let item_ = ast::item_fn(decl, []/~, body); let item: ast::item = {ident: @"tests", - attrs: [], + attrs: []/~, id: cx.sess.next_node_id(), node: item_, vis: ast::public, @@ -241,7 +242,7 @@ fn mk_tests(cx: test_ctxt) -> @ast::item { ret @item; } -fn mk_path(cx: test_ctxt, path: [ast::ident]) -> [ast::ident] { +fn mk_path(cx: test_ctxt, path: [ast::ident]/~) -> [ast::ident]/~ { // For tests that are inside of std we don't want to prefix // the paths with std:: let is_std = { @@ -251,12 +252,12 @@ fn mk_path(cx: test_ctxt, path: [ast::ident]) -> [ast::ident] { _ { false } } }; - (if is_std { [] } else { [@"std"] }) + path + (if is_std { []/~ } else { [@"std"]/~ }) + path } -// The ast::ty of [std::test::test_desc] +// The ast::ty of [std::test::test_desc]/~ fn mk_test_desc_vec_ty(cx: test_ctxt) -> @ast::ty { - let test_desc_ty_path = path_node(mk_path(cx, [@"test", @"test_desc"])); + let test_desc_ty_path = path_node(mk_path(cx, [@"test", @"test_desc"]/~)); let test_desc_ty: ast::ty = {id: cx.sess.next_node_id(), @@ -275,9 +276,9 @@ fn mk_test_desc_vec_ty(cx: test_ctxt) -> @ast::ty { fn mk_test_desc_vec(cx: test_ctxt) -> @ast::expr { #debug("building test vector from %u tests", cx.testfns.len()); - let mut descs = []; + let mut descs = []/~; for cx.testfns.each {|test| - descs += [mk_test_desc_rec(cx, test)]; + descs += [mk_test_desc_rec(cx, test)]/~; } let inner_expr = @{id: cx.sess.next_node_id(), @@ -337,7 +338,7 @@ fn mk_test_desc_rec(cx: test_ctxt, test: test) -> @ast::expr { nospan({mutbl: ast::m_imm, ident: @"should_fail", expr: @fail_expr}); let desc_rec_: ast::expr_ = - ast::expr_rec([name_field, fn_field, ignore_field, fail_field], + ast::expr_rec([name_field, fn_field, ignore_field, fail_field]/~, option::none); let desc_rec: ast::expr = {id: cx.sess.next_node_id(), node: desc_rec_, span: span}; @@ -352,7 +353,7 @@ fn mk_test_wrapper(cx: test_ctxt, span: span) -> @ast::expr { let call_expr: 
ast::expr = { id: cx.sess.next_node_id(), - node: ast::expr_call(@fn_path_expr, [], false), + node: ast::expr_call(@fn_path_expr, []/~, false), span: span }; @@ -360,16 +361,16 @@ fn mk_test_wrapper(cx: test_ctxt, ast::stmt_semi(@call_expr, cx.sess.next_node_id())); let wrapper_decl: ast::fn_decl = { - inputs: [], + inputs: []/~, output: @{id: cx.sess.next_node_id(), node: ast::ty_nil, span: span}, purity: ast::impure_fn, cf: ast::return_val, - constraints: [] + constraints: []/~ }; let wrapper_body: ast::blk = nospan({ - view_items: [], - stmts: [@call_stmt], + view_items: []/~, + stmts: [@call_stmt]/~, expr: option::none, id: cx.sess.next_node_id(), rules: ast::default_blk @@ -378,7 +379,7 @@ fn mk_test_wrapper(cx: test_ctxt, let wrapper_expr: ast::expr = { id: cx.sess.next_node_id(), node: ast::expr_fn(ast::proto_bare, wrapper_decl, - wrapper_body, @[]), + wrapper_body, @[]/~), span: span }; @@ -386,7 +387,7 @@ fn mk_test_wrapper(cx: test_ctxt, } fn mk_main(cx: test_ctxt) -> @ast::item { - let str_pt = path_node([@"str"]); + let str_pt = path_node([@"str"]/~); let str_ty = @{id: cx.sess.next_node_id(), node: ast::ty_path(str_pt, cx.sess.next_node_id()), span: dummy_sp()}; @@ -410,23 +411,23 @@ fn mk_main(cx: test_ctxt) -> @ast::item { span: dummy_sp()}; let decl: ast::fn_decl = - {inputs: [args_arg], + {inputs: [args_arg]/~, output: @ret_ty, purity: ast::impure_fn, cf: ast::return_val, - constraints: []}; + constraints: []/~}; let test_main_call_expr = mk_test_main_call(cx); let body_: ast::blk_ = - default_block([], option::some(test_main_call_expr), + default_block([]/~, option::some(test_main_call_expr), cx.sess.next_node_id()); let body = {node: body_, span: dummy_sp()}; - let item_ = ast::item_fn(decl, [], body); + let item_ = ast::item_fn(decl, []/~, body); let item: ast::item = {ident: @"main", - attrs: [], + attrs: []/~, id: cx.sess.next_node_id(), node: item_, vis: ast::public, @@ -437,7 +438,7 @@ fn mk_main(cx: test_ctxt) -> @ast::item { fn mk_test_main_call(cx: test_ctxt) -> @ast::expr { // Get the args passed to main so we can pass the to test_main - let args_path = path_node([@"args"]); + let args_path = path_node([@"args"]/~); let args_path_expr_: ast::expr_ = ast::expr_path(args_path); @@ -445,20 +446,20 @@ fn mk_test_main_call(cx: test_ctxt) -> @ast::expr { {id: cx.sess.next_node_id(), node: args_path_expr_, span: dummy_sp()}; // Call __test::test to generate the vector of test_descs - let test_path = path_node([@"tests"]); + let test_path = path_node([@"tests"]/~); let test_path_expr_: ast::expr_ = ast::expr_path(test_path); let test_path_expr: ast::expr = {id: cx.sess.next_node_id(), node: test_path_expr_, span: dummy_sp()}; - let test_call_expr_ = ast::expr_call(@test_path_expr, [], false); + let test_call_expr_ = ast::expr_call(@test_path_expr, []/~, false); let test_call_expr: ast::expr = {id: cx.sess.next_node_id(), node: test_call_expr_, span: dummy_sp()}; // Call std::test::test_main - let test_main_path = path_node(mk_path(cx, [@"test", @"test_main"])); + let test_main_path = path_node(mk_path(cx, [@"test", @"test_main"]/~)); let test_main_path_expr_: ast::expr_ = ast::expr_path(test_main_path); @@ -468,7 +469,7 @@ fn mk_test_main_call(cx: test_ctxt) -> @ast::expr { let test_main_call_expr_: ast::expr_ = ast::expr_call(@test_main_path_expr, - [@args_path_expr, @test_call_expr], false); + [@args_path_expr, @test_call_expr]/~, false); let test_main_call_expr: ast::expr = {id: cx.sess.next_node_id(), node: test_main_call_expr_, diff --git a/src/rustc/lib/llvm.rs 
b/src/rustc/lib/llvm.rs index 1c5d41b0939..e43f73697c7 100644 --- a/src/rustc/lib/llvm.rs +++ b/src/rustc/lib/llvm.rs @@ -981,21 +981,22 @@ fn mk_type_names() -> type_names { } fn type_to_str(names: type_names, ty: TypeRef) -> str { - ret type_to_str_inner(names, [], ty); + ret type_to_str_inner(names, []/~, ty); } -fn type_to_str_inner(names: type_names, outer0: [TypeRef], ty: TypeRef) -> +fn type_to_str_inner(names: type_names, outer0: [TypeRef]/~, ty: TypeRef) -> str { alt type_has_name(names, ty) { option::some(n) { ret n; } _ {} } - let outer = outer0 + [ty]; + let outer = outer0 + [ty]/~; let kind = llvm::LLVMGetTypeKind(ty); - fn tys_str(names: type_names, outer: [TypeRef], tys: [TypeRef]) -> str { + fn tys_str(names: type_names, outer: [TypeRef]/~, + tys: [TypeRef]/~) -> str { let mut s: str = ""; let mut first: bool = true; for tys.each {|t| @@ -1021,7 +1022,7 @@ fn type_to_str_inner(names: type_names, outer0: [TypeRef], ty: TypeRef) -> let mut s = "fn("; let out_ty: TypeRef = llvm::LLVMGetReturnType(ty); let n_args = llvm::LLVMCountParamTypes(ty) as uint; - let args: [TypeRef] = vec::from_elem::(n_args, 0 as TypeRef); + let args = vec::from_elem(n_args, 0 as TypeRef); unsafe { llvm::LLVMGetParamTypes(ty, vec::unsafe::to_ptr(args)); } @@ -1033,7 +1034,7 @@ fn type_to_str_inner(names: type_names, outer0: [TypeRef], ty: TypeRef) -> Struct { let mut s: str = "{"; let n_elts = llvm::LLVMCountStructElementTypes(ty) as uint; - let elts: [TypeRef] = vec::from_elem::(n_elts, 0 as TypeRef); + let elts = vec::from_elem(n_elts, 0 as TypeRef); unsafe { llvm::LLVMGetStructElementTypes(ty, vec::unsafe::to_ptr(elts)); } @@ -1082,7 +1083,7 @@ fn float_width(llt: TypeRef) -> uint { }; } -fn fn_ty_param_tys(fn_ty: TypeRef) -> [TypeRef] unsafe { +fn fn_ty_param_tys(fn_ty: TypeRef) -> [TypeRef]/~ unsafe { let args = vec::from_elem(llvm::LLVMCountParamTypes(fn_ty) as uint, 0 as TypeRef); llvm::LLVMGetParamTypes(fn_ty, vec::unsafe::to_ptr(args)); diff --git a/src/rustc/metadata/astencode.rs b/src/rustc/metadata/astencode.rs index b6b0060b5fb..391ab8bfda4 100644 --- a/src/rustc/metadata/astencode.rs +++ b/src/rustc/metadata/astencode.rs @@ -68,7 +68,7 @@ fn encode_inlined_item(ecx: @e::encode_ctxt, ii: ast::inlined_item) { #debug["> Encoding inlined item: %s::%s (%u)", ast_map::path_to_str(path), ii.ident(), - ebml_w.writer.tell()]; + ebml_w.writer.tell()]/~; let id_range = compute_id_range_for_inlined_item(ii); ebml_w.wr_tag(c::tag_ast as uint) {|| @@ -79,7 +79,7 @@ fn encode_inlined_item(ecx: @e::encode_ctxt, #debug["< Encoded inlined fn: %s::%s (%u)", ast_map::path_to_str(path), ii.ident(), - ebml_w.writer.tell()]; + ebml_w.writer.tell()]/~; } fn decode_inlined_item(cdata: cstore::crate_metadata, @@ -103,7 +103,7 @@ fn decode_inlined_item(cdata: cstore::crate_metadata, #debug["Fn named: %s", ii.ident()]; decode_side_tables(xcx, ast_doc); #debug["< Decoded inlined fn: %s::%s", - ast_map::path_to_str(path), ii.ident()]; + ast_map::path_to_str(path), ii.ident()]/~; alt ii { ast::ii_item(i) { #debug(">>> DECODED ITEM >>>\n%s\n<<< DECODED ITEM <<<", @@ -522,7 +522,7 @@ impl helpers for ebml::writer { e::write_type(ecx, self, ty) } - fn emit_tys(ecx: @e::encode_ctxt, tys: [ty::t]) { + fn emit_tys(ecx: @e::encode_ctxt, tys: [ty::t]/~) { self.emit_from_vec(tys) {|ty| e::write_type(ecx, self, ty) } @@ -707,7 +707,7 @@ fn encode_side_tables_for_id(ecx: @e::encode_ctxt, impl decoder for ebml::doc { fn as_int() -> int { ebml::doc_as_u64(self) as int } - fn [](tag: c::astencode_tag) -> ebml::doc { + fn []/~(tag: 
c::astencode_tag) -> ebml::doc { ebml::get_doc(self, tag as uint) } fn opt_child(tag: c::astencode_tag) -> option { @@ -727,11 +727,11 @@ impl decoder for ebml::ebml_deserializer { xcx.tr_def_id(_)) } - fn read_tys(xcx: extended_decode_ctxt) -> [ty::t] { + fn read_tys(xcx: extended_decode_ctxt) -> [ty::t]/~ { self.read_to_vec {|| self.read_ty(xcx) } } - fn read_bounds(xcx: extended_decode_ctxt) -> @[ty::param_bound] { + fn read_bounds(xcx: extended_decode_ctxt) -> @[ty::param_bound]/~ { tydecode::parse_bounds_data( self.parent.data, self.pos, xcx.dcx.cdata.cnum, xcx.dcx.tcx, xcx.tr_def_id(_)) @@ -765,7 +765,7 @@ fn decode_side_tables(xcx: extended_decode_ctxt, #debug[">> Side table document with tag 0x%x \ found for id %d (orig %d)", - tag, id, id0]; + tag, id, id0]/~; if tag == (c::tag_table_mutbl as uint) { dcx.maps.mutbl_map.insert(id, ()); @@ -859,7 +859,7 @@ type fake_session = (); #[cfg(test)] impl of fake_ext_ctxt for fake_session { - fn cfg() -> ast::crate_cfg { [] } + fn cfg() -> ast::crate_cfg { []/~ } fn parse_sess() -> parse::parse_sess { new_parse_sess() } } @@ -922,13 +922,13 @@ fn test_simplification() { let item_in = ast::ii_item(#ast(item) { fn new_int_alist() -> alist { fn eq_int(&&a: int, &&b: int) -> bool { a == b } - ret {eq_fn: eq_int, mut data: []}; + ret {eq_fn: eq_int, mut data: []/~}; } }); let item_out = simplify_ast(item_in); let item_exp = ast::ii_item(#ast(item) { fn new_int_alist() -> alist { - ret {eq_fn: eq_int, mut data: []}; + ret {eq_fn: eq_int, mut data: []/~}; } }); alt (item_out, item_exp) { diff --git a/src/rustc/metadata/creader.rs b/src/rustc/metadata/creader.rs index c63d03ce239..c9146135bca 100644 --- a/src/rustc/metadata/creader.rs +++ b/src/rustc/metadata/creader.rs @@ -43,7 +43,7 @@ type cache_entry = { cnum: int, span: span, hash: @str, - metas: @[@ast::meta_item] + metas: @[@ast::meta_item]/~ }; fn dump_crates(crate_cache: dvec) { @@ -54,7 +54,7 @@ fn dump_crates(crate_cache: dvec) { #debug("hash: %?", entry.hash); let attrs = [ attr::mk_attr(attr::mk_list_item(@"link", *entry.metas)) - ]; + ]/~; for attr::find_linkage_attrs(attrs).each {|attr| #debug("meta: %s", pprust::attr_to_str(attr)); } @@ -62,7 +62,7 @@ fn dump_crates(crate_cache: dvec) { } fn warn_if_multiple_versions(diag: span_handler, - crate_cache: [cache_entry]) { + crate_cache: [cache_entry]/~) { import either::*; if crate_cache.len() != 0u { @@ -86,7 +86,7 @@ fn warn_if_multiple_versions(diag: span_handler, diag.span_note(match.span, "used here"); let attrs = [ attr::mk_attr(attr::mk_list_item(@"link", *match.metas)) - ]; + ]/~; loader::note_linkage_attrs(diag, attrs); } } @@ -161,21 +161,21 @@ fn visit_item(e: env, i: @ast::item) { } fn metas_with(ident: ast::ident, key: ast::ident, - metas: [@ast::meta_item]) -> [@ast::meta_item] { + metas: [@ast::meta_item]/~) -> [@ast::meta_item]/~ { let name_items = attr::find_meta_items_by_name(metas, *key); if name_items.is_empty() { - metas + [attr::mk_name_value_item_str(key, *ident)] + metas + [attr::mk_name_value_item_str(key, *ident)]/~ } else { metas } } fn metas_with_ident(ident: ast::ident, - metas: [@ast::meta_item]) -> [@ast::meta_item] { + metas: [@ast::meta_item]/~) -> [@ast::meta_item]/~ { metas_with(ident, @"name", metas) } -fn existing_match(e: env, metas: [@ast::meta_item], hash: str) -> +fn existing_match(e: env, metas: [@ast::meta_item]/~, hash: str) -> option { for e.crate_cache.each {|c| @@ -187,7 +187,7 @@ fn existing_match(e: env, metas: [@ast::meta_item], hash: str) -> ret none; } -fn resolve_crate(e: env, ident: 
ast::ident, metas: [@ast::meta_item], +fn resolve_crate(e: env, ident: ast::ident, metas: [@ast::meta_item]/~, hash: str, span: span) -> ast::crate_num { let metas = metas_with_ident(ident, metas); @@ -241,7 +241,7 @@ fn resolve_crate(e: env, ident: ast::ident, metas: [@ast::meta_item], } // Go through the crate metadata and load any crates that it references -fn resolve_crate_deps(e: env, cdata: @[u8]) -> cstore::cnum_map { +fn resolve_crate_deps(e: env, cdata: @[u8]/~) -> cstore::cnum_map { #debug("resolving deps of external crate"); // The map from crate numbers in the crate we're resolving to local crate // numbers @@ -249,7 +249,7 @@ fn resolve_crate_deps(e: env, cdata: @[u8]) -> cstore::cnum_map { for decoder::get_crate_deps(cdata).each {|dep| let extrn_cnum = dep.cnum; let cname = dep.name; - let cmetas = metas_with(dep.vers, @"vers", []); + let cmetas = metas_with(dep.vers, @"vers", []/~); #debug("resolving dep crate %s ver: %s hash: %s", *dep.name, *dep.vers, *dep.hash); alt existing_match(e, metas_with_ident(cname, cmetas), *dep.hash) { diff --git a/src/rustc/metadata/csearch.rs b/src/rustc/metadata/csearch.rs index f65f6110388..f5d628622b2 100644 --- a/src/rustc/metadata/csearch.rs +++ b/src/rustc/metadata/csearch.rs @@ -39,12 +39,12 @@ fn get_type_param_count(cstore: cstore::cstore, def: ast::def_id) -> uint { } fn lookup_defs(cstore: cstore::cstore, cnum: ast::crate_num, - path: [ast::ident]) -> [ast::def] { - let mut result = []; + path: [ast::ident]/~) -> [ast::def]/~ { + let mut result = []/~; #debug("lookup_defs: path = %? cnum = %?", path, cnum); for resolve_path(cstore, cnum, path).each {|elt| let (c, data, def) = elt; - result += [decoder::lookup_def(c, data, def)]; + result += [decoder::lookup_def(c, data, def)]/~; } ret result; } @@ -60,21 +60,21 @@ fn lookup_method_purity(cstore: cstore::cstore, did: ast::def_id) /* Returns a vector of possible def IDs for a given path, in a given crate */ fn resolve_path(cstore: cstore::cstore, cnum: ast::crate_num, - path: [ast::ident]) -> - [(ast::crate_num, @[u8], ast::def_id)] { + path: [ast::ident]/~) -> + [(ast::crate_num, @[u8]/~, ast::def_id)]/~ { let cm = cstore::get_crate_data(cstore, cnum); #debug("resolve_path %s in crates[%d]:%s", ast_util::path_name_i(path), cnum, cm.name); - let mut result = []; + let mut result = []/~; for decoder::resolve_path(path, cm.data).each {|def| if def.crate == ast::local_crate { - result += [(cnum, cm.data, def)]; + result += [(cnum, cm.data, def)]/~; } else { if cm.cnum_map.contains_key(def.crate) { // This reexport is itself a reexport from another crate let next_cnum = cm.cnum_map.get(def.crate); let next_cm_data = cstore::get_crate_data(cstore, next_cnum); - result += [(next_cnum, next_cm_data.data, def)]; + result += [(next_cnum, next_cm_data.data, def)]/~; } } } @@ -88,7 +88,7 @@ fn get_item_path(tcx: ty::ctxt, def: ast::def_id) -> ast_map::path { // FIXME #1920: This path is not always correct if the crate is not linked // into the root namespace. 
- [ast_map::path_mod(@cdata.name)] + path + [ast_map::path_mod(@cdata.name)]/~ + path } enum found_ast { @@ -109,7 +109,8 @@ fn maybe_get_item_ast(tcx: ty::ctxt, def: ast::def_id, decode_inlined_item) } -fn get_enum_variants(tcx: ty::ctxt, def: ast::def_id) -> [ty::variant_info] { +fn get_enum_variants(tcx: ty::ctxt, def: ast::def_id) + -> [ty::variant_info]/~ { let cstore = tcx.cstore; let cdata = cstore::get_crate_data(cstore, def.crate); ret decoder::get_enum_variants(cdata, def.node, tcx) @@ -117,20 +118,20 @@ fn get_enum_variants(tcx: ty::ctxt, def: ast::def_id) -> [ty::variant_info] { fn get_impls_for_mod(cstore: cstore::cstore, def: ast::def_id, name: option) - -> @[@decoder::_impl] { + -> @[@decoder::_impl]/~ { let cdata = cstore::get_crate_data(cstore, def.crate); decoder::get_impls_for_mod(cdata, def.node, name) {|cnum| cstore::get_crate_data(cstore, cnum) } } -fn get_iface_methods(tcx: ty::ctxt, def: ast::def_id) -> @[ty::method] { +fn get_iface_methods(tcx: ty::ctxt, def: ast::def_id) -> @[ty::method]/~ { let cstore = tcx.cstore; let cdata = cstore::get_crate_data(cstore, def.crate); decoder::get_iface_methods(cdata, def.node, tcx) } -fn get_class_fields(tcx: ty::ctxt, def: ast::def_id) -> [ty::field_ty] { +fn get_class_fields(tcx: ty::ctxt, def: ast::def_id) -> [ty::field_ty]/~ { let cstore = tcx.cstore; let cdata = cstore::get_crate_data(cstore, def.crate); decoder::get_class_fields(cdata, def.node) @@ -159,7 +160,7 @@ fn get_field_type(tcx: ty::ctxt, class_id: ast::def_id, class_id, def)}); #debug("got field data %?", the_field); let ty = decoder::item_type(def, the_field, tcx, cdata); - ret {bounds: @[], rp: ast::rp_none, ty: ty}; + ret {bounds: @[]/~, rp: ast::rp_none, ty: ty}; } // Given a def_id for an impl or class, return the iface it implements, diff --git a/src/rustc/metadata/cstore.rs b/src/rustc/metadata/cstore.rs index ebfa8df3b10..7f7468faea5 100644 --- a/src/rustc/metadata/cstore.rs +++ b/src/rustc/metadata/cstore.rs @@ -40,7 +40,7 @@ type cnum_map = map::hashmap; type mod_path_map = map::hashmap; type crate_metadata = @{name: str, - data: @[u8], + data: @[u8]/~, cnum_map: cnum_map, cnum: ast::crate_num}; @@ -55,9 +55,9 @@ type cstore_private = @{metas: map::hashmap, use_crate_map: use_crate_map, mod_path_map: mod_path_map, - mut used_crate_files: [str], - mut used_libraries: [str], - mut used_link_args: [str]}; + mut used_crate_files: [str]/~, + mut used_libraries: [str]/~, + mut used_link_args: [str]/~}; // Map from node_id's of local use statements to crate numbers type use_crate_map = map::hashmap; @@ -74,9 +74,9 @@ fn mk_cstore() -> cstore { ret private(@{metas: meta_cache, use_crate_map: crate_map, mod_path_map: mod_path_map, - mut used_crate_files: [], - mut used_libraries: [], - mut used_link_args: []}); + mut used_crate_files: []/~, + mut used_libraries: []/~, + mut used_link_args: []/~}); } fn get_crate_data(cstore: cstore, cnum: ast::crate_num) -> crate_metadata { @@ -113,11 +113,11 @@ fn iter_crate_data(cstore: cstore, i: fn(ast::crate_num, crate_metadata)) { fn add_used_crate_file(cstore: cstore, lib: str) { if !vec::contains(p(cstore).used_crate_files, lib) { - p(cstore).used_crate_files += [lib]; + p(cstore).used_crate_files += [lib]/~; } } -fn get_used_crate_files(cstore: cstore) -> [str] { +fn get_used_crate_files(cstore: cstore) -> [str]/~ { ret p(cstore).used_crate_files; } @@ -125,11 +125,11 @@ fn add_used_library(cstore: cstore, lib: str) -> bool { assert lib != ""; if vec::contains(p(cstore).used_libraries, lib) { ret false; } - 
p(cstore).used_libraries += [lib]; + p(cstore).used_libraries += [lib]/~; ret true; } -fn get_used_libraries(cstore: cstore) -> [str] { +fn get_used_libraries(cstore: cstore) -> [str]/~ { ret p(cstore).used_libraries; } @@ -137,7 +137,7 @@ fn add_used_link_args(cstore: cstore, args: str) { p(cstore).used_link_args += str::split_char(args, ' '); } -fn get_used_link_args(cstore: cstore) -> [str] { +fn get_used_link_args(cstore: cstore) -> [str]/~ { ret p(cstore).used_link_args; } @@ -153,15 +153,15 @@ fn find_use_stmt_cnum(cstore: cstore, // returns hashes of crates directly used by this crate. Hashes are // sorted by crate name. -fn get_dep_hashes(cstore: cstore) -> [@str] { +fn get_dep_hashes(cstore: cstore) -> [@str]/~ { type crate_hash = {name: @str, hash: @str}; - let mut result = []; + let mut result = []/~; for p(cstore).use_crate_map.each_value {|cnum| let cdata = cstore::get_crate_data(cstore, cnum); let hash = decoder::get_crate_hash(cdata.data); #debug("Add hash[%s]: %s", cdata.name, *hash); - result += [{name: @cdata.name, hash: hash}]; + result += [{name: @cdata.name, hash: hash}]/~; }; fn lteq(a: crate_hash, b: crate_hash) -> bool { ret *a.name <= *b.name; @@ -175,9 +175,9 @@ fn get_dep_hashes(cstore: cstore) -> [@str] { ret vec::map(sorted, mapper); } -fn get_path(cstore: cstore, d: ast::def_id) -> [ast::ident] { +fn get_path(cstore: cstore, d: ast::def_id) -> [ast::ident]/~ { // let f = bind str::split_str(_, "::"); - option::map_default(p(cstore).mod_path_map.find(d), [], + option::map_default(p(cstore).mod_path_map.find(d), []/~, {|ds| str::split_str(*ds, "::").map({|x|@x})}) } // Local Variables: diff --git a/src/rustc/metadata/decoder.rs b/src/rustc/metadata/decoder.rs index 8f25281f4a8..9979bda226c 100644 --- a/src/rustc/metadata/decoder.rs +++ b/src/rustc/metadata/decoder.rs @@ -53,8 +53,8 @@ export translate_def_id; // what crate that's in and give us a def_id that makes sense for the current // build. -fn lookup_hash(d: ebml::doc, eq_fn: fn@([u8]) -> bool, hash: uint) -> - [ebml::doc] { +fn lookup_hash(d: ebml::doc, eq_fn: fn@([u8]/~) -> bool, hash: uint) -> + [ebml::doc]/~ { let index = ebml::get_doc(d, tag_index); let table = ebml::get_doc(index, tag_index_table); let hash_pos = table.start + hash % 256u * 4u; @@ -62,19 +62,19 @@ fn lookup_hash(d: ebml::doc, eq_fn: fn@([u8]) -> bool, hash: uint) -> let {tag:_, doc:bucket} = ebml::doc_at(d.data, pos); // Awkward logic because we can't ret from foreach yet - let mut result: [ebml::doc] = []; + let mut result: [ebml::doc]/~ = []/~; let belt = tag_index_buckets_bucket_elt; ebml::tagged_docs(bucket, belt) {|elt| let pos = io::u64_from_be_bytes(*elt.data, elt.start, 4u) as uint; if eq_fn(vec::slice::(*elt.data, elt.start + 4u, elt.end)) { - result += [ebml::doc_at(d.data, pos).doc]; + result += [ebml::doc_at(d.data, pos).doc]/~; } }; ret result; } fn maybe_find_item(item_id: int, items: ebml::doc) -> option { - fn eq_item(bytes: [u8], item_id: int) -> bool { + fn eq_item(bytes: [u8]/~, item_id: int) -> bool { ret io::u64_from_be_bytes(bytes, 0u, 4u) as int == item_id; } let eqer = {|a|eq_item(a, item_id)}; @@ -90,7 +90,7 @@ fn find_item(item_id: int, items: ebml::doc) -> ebml::doc { // Looks up an item in the given metadata and returns an ebml doc pointing // to the item data. 
-fn lookup_item(item_id: int, data: @[u8]) -> ebml::doc { +fn lookup_item(item_id: int, data: @[u8]/~) -> ebml::doc { let items = ebml::get_doc(ebml::doc(data), tag_items); alt maybe_find_item(item_id, items) { none { fail(#fmt("lookup_item: id not found: %d", item_id)); } @@ -164,13 +164,13 @@ fn item_impl_iface(item: ebml::doc, tcx: ty::ctxt, cdata: cmd) } fn item_ty_param_bounds(item: ebml::doc, tcx: ty::ctxt, cdata: cmd) - -> @[ty::param_bounds] { - let mut bounds = []; + -> @[ty::param_bounds]/~ { + let mut bounds = []/~; ebml::tagged_docs(item, tag_items_data_item_ty_param_bounds) {|p| let bd = parse_bounds_data(p.data, p.start, cdata.cnum, tcx, {|did| translate_def_id(cdata, did) }); - bounds += [bd]; + bounds += [bd]/~; } @bounds } @@ -194,31 +194,31 @@ fn item_ty_param_count(item: ebml::doc) -> uint { n } -fn enum_variant_ids(item: ebml::doc, cdata: cmd) -> [ast::def_id] { - let mut ids: [ast::def_id] = []; +fn enum_variant_ids(item: ebml::doc, cdata: cmd) -> [ast::def_id]/~ { + let mut ids: [ast::def_id]/~ = []/~; let v = tag_items_data_item_variant; ebml::tagged_docs(item, v) {|p| let ext = parse_def_id(ebml::doc_data(p)); - ids += [{crate: cdata.cnum, node: ext.node}]; + ids += [{crate: cdata.cnum, node: ext.node}]/~; }; ret ids; } // Given a path and serialized crate metadata, returns the IDs of the // definitions the path may refer to. -fn resolve_path(path: [ast::ident], data: @[u8]) -> [ast::def_id] { - fn eq_item(data: [u8], s: str) -> bool { +fn resolve_path(path: [ast::ident]/~, data: @[u8]/~) -> [ast::def_id]/~ { + fn eq_item(data: [u8]/~, s: str) -> bool { ret str::eq(str::from_bytes(data), s); } let s = ast_util::path_name_i(path); let md = ebml::doc(data); let paths = ebml::get_doc(md, tag_paths); let eqer = {|a|eq_item(a, s)}; - let mut result: [ast::def_id] = []; + let mut result: [ast::def_id]/~ = []/~; #debug("resolve_path: looking up %s", s); for lookup_hash(paths, eqer, hash_path(s)).each {|doc| let did_doc = ebml::get_doc(doc, tag_def_id); - result += [parse_def_id(ebml::doc_data(did_doc))]; + result += [parse_def_id(ebml::doc_data(did_doc))]/~; } ret result; } @@ -229,16 +229,16 @@ fn item_path(item_doc: ebml::doc) -> ast_map::path { let len_doc = ebml::get_doc(path_doc, tag_path_len); let len = ebml::doc_as_u32(len_doc) as uint; - let mut result = []; + let mut result = []/~; vec::reserve(result, len); ebml::docs(path_doc) {|tag, elt_doc| if tag == tag_path_elt_mod { let str = ebml::doc_as_str(elt_doc); - result += [ast_map::path_mod(@str)]; + result += [ast_map::path_mod(@str)]/~; } else if tag == tag_path_elt_name { let str = ebml::doc_as_str(elt_doc); - result += [ast_map::path_name(@str)]; + result += [ast_map::path_name(@str)]/~; } else { // ignore tag_path_len element } @@ -252,11 +252,11 @@ fn item_name(item: ebml::doc) -> ast::ident { @str::from_bytes(ebml::doc_data(name)) } -fn lookup_item_name(data: @[u8], id: ast::node_id) -> ast::ident { +fn lookup_item_name(data: @[u8]/~, id: ast::node_id) -> ast::ident { item_name(lookup_item(id, data)) } -fn lookup_def(cnum: ast::crate_num, data: @[u8], did_: ast::def_id) -> +fn lookup_def(cnum: ast::crate_num, data: @[u8]/~, did_: ast::def_id) -> ast::def { let item = lookup_item(did_.node, data); let fam_ch = item_family(item); @@ -288,12 +288,12 @@ fn get_type(cdata: cmd, id: ast::node_id, tcx: ty::ctxt) let t = item_type({crate: cdata.cnum, node: id}, item, tcx, cdata); let tp_bounds = if family_has_type_params(item_family(item)) { item_ty_param_bounds(item, tcx, cdata) - } else { @[] }; + } else { @[]/~ 
}; let rp = item_ty_region_param(item); ret {bounds: tp_bounds, rp: rp, ty: t}; } -fn get_type_param_count(data: @[u8], id: ast::node_id) -> uint { +fn get_type_param_count(data: @[u8]/~, id: ast::node_id) -> uint { item_ty_param_count(lookup_item(id, data)) } @@ -351,7 +351,7 @@ fn class_dtor(cdata: cmd, id: ast::node_id) -> option { found } -fn get_symbol(data: @[u8], id: ast::node_id) -> str { +fn get_symbol(data: @[u8]/~, id: ast::node_id) -> str { ret item_symbol(lookup_item(id, data)); } @@ -392,11 +392,11 @@ fn maybe_get_item_ast(cdata: cmd, tcx: ty::ctxt, } fn get_enum_variants(cdata: cmd, id: ast::node_id, tcx: ty::ctxt) - -> [ty::variant_info] { + -> [ty::variant_info]/~ { let data = cdata.data; let items = ebml::get_doc(ebml::doc(data), tag_items); let item = find_item(id, items); - let mut infos: [ty::variant_info] = []; + let mut infos: [ty::variant_info]/~ = []/~; let variant_ids = enum_variant_ids(item, cdata); let mut disr_val = 0; for variant_ids.each {|did| @@ -404,10 +404,10 @@ fn get_enum_variants(cdata: cmd, id: ast::node_id, tcx: ty::ctxt) let ctor_ty = item_type({crate: cdata.cnum, node: id}, item, tcx, cdata); let name = item_name(item); - let mut arg_tys: [ty::t] = []; + let mut arg_tys: [ty::t]/~ = []/~; alt ty::get(ctor_ty).struct { ty::ty_fn(f) { - for f.inputs.each {|a| arg_tys += [a.ty]; } + for f.inputs.each {|a| arg_tys += [a.ty]/~; } } _ { /* Nullary enum variant. */ } } @@ -416,7 +416,7 @@ fn get_enum_variants(cdata: cmd, id: ast::node_id, tcx: ty::ctxt) _ { /* empty */ } } infos += [@{args: arg_tys, ctor_ty: ctor_ty, name: name, - id: did, disr_val: disr_val}]; + id: did, disr_val: disr_val}]/~; disr_val += 1; } ret infos; @@ -424,18 +424,18 @@ fn get_enum_variants(cdata: cmd, id: ast::node_id, tcx: ty::ctxt) // NB: These types are duplicated in resolve.rs type method_info = {did: ast::def_id, n_tps: uint, ident: ast::ident}; -type _impl = {did: ast::def_id, ident: ast::ident, methods: [@method_info]}; +type _impl = {did: ast::def_id, ident: ast::ident, methods: [@method_info]/~}; fn item_impl_methods(cdata: cmd, item: ebml::doc, base_tps: uint) - -> [@method_info] { - let mut rslt = []; + -> [@method_info]/~ { + let mut rslt = []/~; ebml::tagged_docs(item, tag_item_impl_method) {|doc| let m_did = parse_def_id(ebml::doc_data(doc)); let mth_item = lookup_item(m_did.node, cdata.data); rslt += [@{did: translate_def_id(cdata, m_did), /* FIXME (maybe #2323) tjc: take a look at this. 
*/ n_tps: item_ty_param_count(mth_item) - base_tps, - ident: item_name(mth_item)}]; + ident: item_name(mth_item)}]/~; } rslt } @@ -443,10 +443,10 @@ fn item_impl_methods(cdata: cmd, item: ebml::doc, base_tps: uint) fn get_impls_for_mod(cdata: cmd, m_id: ast::node_id, name: option, get_cdata: fn(ast::crate_num) -> cmd) - -> @[@_impl] { + -> @[@_impl]/~ { let data = cdata.data; let mod_item = lookup_item(m_id, data); - let mut result = []; + let mut result = []/~; ebml::tagged_docs(mod_item, tag_mod_impl) {|doc| let did = parse_def_id(ebml::doc_data(doc)); let local_did = translate_def_id(cdata, did); @@ -461,7 +461,7 @@ fn get_impls_for_mod(cdata: cmd, m_id: ast::node_id, result += [@{ did: local_did, ident: nm, methods: item_impl_methods(impl_cdata, item, base_tps) - }]; + }]/~; }; } @result @@ -469,10 +469,10 @@ fn get_impls_for_mod(cdata: cmd, m_id: ast::node_id, /* Works for both classes and ifaces */ fn get_iface_methods(cdata: cmd, id: ast::node_id, tcx: ty::ctxt) - -> @[ty::method] { + -> @[ty::method]/~ { let data = cdata.data; let item = lookup_item(id, data); - let mut result = []; + let mut result = []/~; ebml::tagged_docs(item, tag_item_iface_method) {|mth| let bounds = item_ty_param_bounds(mth, tcx, cdata); let name = item_name(mth); @@ -487,17 +487,17 @@ fn get_iface_methods(cdata: cmd, id: ast::node_id, tcx: ty::ctxt) 'u' { ast::unsafe_fn } 'f' { ast::impure_fn } 'p' { ast::pure_fn } - }, vis: ast::public}]; + }, vis: ast::public}]/~; } @result } // Helper function that gets either fields or methods fn get_class_members(cdata: cmd, id: ast::node_id, - p: fn(char) -> bool) -> [ty::field_ty] { + p: fn(char) -> bool) -> [ty::field_ty]/~ { let data = cdata.data; let item = lookup_item(id, data); - let mut result = []; + let mut result = []/~; ebml::tagged_docs(item, tag_item_field) {|an_item| let f = item_family(an_item); if p(f) { @@ -505,7 +505,7 @@ fn get_class_members(cdata: cmd, id: ast::node_id, let did = class_member_id(an_item, cdata); let mt = field_mutability(an_item); result += [{ident: name, id: did, vis: - family_to_visibility(f), mutability: mt}]; + family_to_visibility(f), mutability: mt}]/~; } } result @@ -519,7 +519,7 @@ pure fn family_to_visibility(family: char) -> ast::visibility { } /* 'g' for public field, 'j' for private field */ -fn get_class_fields(cdata: cmd, id: ast::node_id) -> [ty::field_ty] { +fn get_class_fields(cdata: cmd, id: ast::node_id) -> [ty::field_ty]/~ { get_class_members(cdata, id, {|f| f == 'g' || f == 'j'}) } @@ -576,12 +576,12 @@ fn item_family_to_str(fam: char) -> str { } } -fn get_meta_items(md: ebml::doc) -> [@ast::meta_item] { - let mut items: [@ast::meta_item] = []; +fn get_meta_items(md: ebml::doc) -> [@ast::meta_item]/~ { + let mut items: [@ast::meta_item]/~ = []/~; ebml::tagged_docs(md, tag_meta_item_word) {|meta_item_doc| let nd = ebml::get_doc(meta_item_doc, tag_meta_item_name); let n = str::from_bytes(ebml::doc_data(nd)); - items += [attr::mk_word_item(@n)]; + items += [attr::mk_word_item(@n)]/~; }; ebml::tagged_docs(md, tag_meta_item_name_value) {|meta_item_doc| let nd = ebml::get_doc(meta_item_doc, tag_meta_item_name); @@ -590,19 +590,19 @@ fn get_meta_items(md: ebml::doc) -> [@ast::meta_item] { let v = str::from_bytes(ebml::doc_data(vd)); // FIXME (#623): Should be able to decode meta_name_value variants, // but currently the encoder just drops them - items += [attr::mk_name_value_item_str(@n, v)]; + items += [attr::mk_name_value_item_str(@n, v)]/~; }; ebml::tagged_docs(md, tag_meta_item_list) {|meta_item_doc| let nd = 
ebml::get_doc(meta_item_doc, tag_meta_item_name); let n = str::from_bytes(ebml::doc_data(nd)); let subitems = get_meta_items(meta_item_doc); - items += [attr::mk_list_item(@n, subitems)]; + items += [attr::mk_list_item(@n, subitems)]/~; }; ret items; } -fn get_attributes(md: ebml::doc) -> [ast::attribute] { - let mut attrs: [ast::attribute] = []; +fn get_attributes(md: ebml::doc) -> [ast::attribute]/~ { + let mut attrs: [ast::attribute]/~ = []/~; alt ebml::maybe_get_doc(md, tag_attributes) { option::some(attrs_d) { ebml::tagged_docs(attrs_d, tag_attribute) {|attr_doc| @@ -613,7 +613,7 @@ fn get_attributes(md: ebml::doc) -> [ast::attribute] { let meta_item = meta_items[0]; attrs += [{node: {style: ast::attr_outer, value: *meta_item}, - span: ast_util::dummy_sp()}]; + span: ast_util::dummy_sp()}]/~; }; } option::none { } @@ -637,15 +637,15 @@ fn list_crate_attributes(md: ebml::doc, hash: @str, out: io::writer) { out.write_str("\n\n"); } -fn get_crate_attributes(data: @[u8]) -> [ast::attribute] { +fn get_crate_attributes(data: @[u8]/~) -> [ast::attribute]/~ { ret get_attributes(ebml::doc(data)); } type crate_dep = {cnum: ast::crate_num, name: ast::ident, vers: @str, hash: @str}; -fn get_crate_deps(data: @[u8]) -> [crate_dep] { - let mut deps: [crate_dep] = []; +fn get_crate_deps(data: @[u8]/~) -> [crate_dep]/~ { + let mut deps: [crate_dep]/~ = []/~; let cratedoc = ebml::doc(data); let depsdoc = ebml::get_doc(cratedoc, tag_crate_deps); let mut crate_num = 1; @@ -656,13 +656,13 @@ fn get_crate_deps(data: @[u8]) -> [crate_dep] { deps += [{cnum: crate_num, name: @docstr(depdoc, tag_crate_dep_name), vers: @docstr(depdoc, tag_crate_dep_vers), - hash: @docstr(depdoc, tag_crate_dep_hash)}]; + hash: @docstr(depdoc, tag_crate_dep_hash)}]/~; crate_num += 1; }; ret deps; } -fn list_crate_deps(data: @[u8], out: io::writer) { +fn list_crate_deps(data: @[u8]/~, out: io::writer) { out.write_str("=External Dependencies=\n"); for get_crate_deps(data).each {|dep| @@ -673,13 +673,13 @@ fn list_crate_deps(data: @[u8], out: io::writer) { out.write_str("\n"); } -fn get_crate_hash(data: @[u8]) -> @str { +fn get_crate_hash(data: @[u8]/~) -> @str { let cratedoc = ebml::doc(data); let hashdoc = ebml::get_doc(cratedoc, tag_crate_hash); ret @str::from_bytes(ebml::doc_data(hashdoc)); } -fn get_crate_vers(data: @[u8]) -> @str { +fn get_crate_vers(data: @[u8]/~) -> @str { let attrs = decoder::get_crate_attributes(data); ret alt attr::last_meta_item_value_str_by_name( attr::find_linkage_metas(attrs), "vers") { @@ -688,7 +688,7 @@ fn get_crate_vers(data: @[u8]) -> @str { }; } -fn list_crate_items(bytes: @[u8], md: ebml::doc, out: io::writer) { +fn list_crate_items(bytes: @[u8]/~, md: ebml::doc, out: io::writer) { out.write_str("=Items=\n"); let items = ebml::get_doc(md, tag_items); iter_crate_items(bytes) {|path, did| @@ -697,7 +697,7 @@ fn list_crate_items(bytes: @[u8], md: ebml::doc, out: io::writer) { out.write_str("\n"); } -fn iter_crate_items(bytes: @[u8], proc: fn(str, ast::def_id)) { +fn iter_crate_items(bytes: @[u8]/~, proc: fn(str, ast::def_id)) { let md = ebml::doc(bytes); let paths = ebml::get_doc(md, tag_paths); let index = ebml::get_doc(paths, tag_index); @@ -714,14 +714,14 @@ fn iter_crate_items(bytes: @[u8], proc: fn(str, ast::def_id)) { }; } -fn get_crate_module_paths(bytes: @[u8]) -> [(ast::def_id, str)] { +fn get_crate_module_paths(bytes: @[u8]/~) -> [(ast::def_id, str)]/~ { fn mod_of_path(p: str) -> str { str::connect(vec::init(str::split_str(p, "::")), "::") } // find all module (path, def_ids), which are 
not // fowarded path due to renamed import or reexport - let mut res = []; + let mut res = []/~; let mods = map::str_hash(); iter_crate_items(bytes) {|path, did| let m = mod_of_path(path); @@ -732,7 +732,7 @@ fn get_crate_module_paths(bytes: @[u8]) -> [(ast::def_id, str)] { // Collect everything by now. There might be multiple // paths pointing to the same did. Those will be // unified later by using the mods map - res += [(did, path)]; + res += [(did, path)]/~; } ret vec::filter(res) {|x| let (_, xp) = x; @@ -740,7 +740,7 @@ fn get_crate_module_paths(bytes: @[u8]) -> [(ast::def_id, str)] { } } -fn list_crate_metadata(bytes: @[u8], out: io::writer) { +fn list_crate_metadata(bytes: @[u8]/~, out: io::writer) { let hash = get_crate_hash(bytes); let md = ebml::doc(bytes); list_crate_attributes(md, hash, out); diff --git a/src/rustc/metadata/encoder.rs b/src/rustc/metadata/encoder.rs index 8390989e7dd..c9802da3b03 100644 --- a/src/rustc/metadata/encoder.rs +++ b/src/rustc/metadata/encoder.rs @@ -43,8 +43,8 @@ type encode_parms = { diag: span_handler, tcx: ty::ctxt, reachable: hashmap, - reexports: [(str, def_id)], - impl_map: fn@(ast::node_id) -> [(ident, def_id)], + reexports: [(str, def_id)]/~, + impl_map: fn@(ast::node_id) -> [(ident, def_id)]/~, item_symbols: hashmap, discrim_symbols: hashmap, link_meta: link_meta, @@ -56,8 +56,8 @@ enum encode_ctxt = { diag: span_handler, tcx: ty::ctxt, reachable: hashmap, - reexports: [(str, def_id)], - impl_map: fn@(ast::node_id) -> [(ident, def_id)], + reexports: [(str, def_id)]/~, + impl_map: fn@(ast::node_id) -> [(ident, def_id)]/~, item_symbols: hashmap, discrim_symbols: hashmap, link_meta: link_meta, @@ -108,8 +108,8 @@ fn encode_mutability(ebml_w: ebml::writer, mt: class_mutability) { type entry = {val: T, pos: uint}; -fn encode_enum_variant_paths(ebml_w: ebml::writer, variants: [variant], - path: [ident], &index: [entry]) { +fn encode_enum_variant_paths(ebml_w: ebml::writer, variants: [variant]/~, + path: [ident]/~, &index: [entry]/~) { for variants.each {|variant| add_to_index(ebml_w, path, index, variant.node.name); ebml_w.wr_tag(tag_paths_data_item) {|| @@ -119,15 +119,15 @@ fn encode_enum_variant_paths(ebml_w: ebml::writer, variants: [variant], } } -fn add_to_index(ebml_w: ebml::writer, path: [ident], &index: [entry], +fn add_to_index(ebml_w: ebml::writer, path: [ident]/~, &index: [entry]/~, name: ident) { - let full_path = path + [name]; - index += - [{val: ast_util::path_name_i(full_path), pos: ebml_w.writer.tell()}]; + let full_path = path + [name]/~; + vec::push(index, {val: ast_util::path_name_i(full_path), + pos: ebml_w.writer.tell()}); } fn encode_native_module_item_paths(ebml_w: ebml::writer, nmod: native_mod, - path: [ident], &index: [entry]) { + path: [ident]/~, &index: [entry]/~) { for nmod.items.each {|nitem| add_to_index(ebml_w, path, index, nitem.ident); encode_named_def_id(ebml_w, nitem.ident, local_def(nitem.id)); @@ -135,7 +135,7 @@ fn encode_native_module_item_paths(ebml_w: ebml::writer, nmod: native_mod, } fn encode_class_item_paths(ebml_w: ebml::writer, - items: [@class_member], path: [ident], &index: [entry]) { + items: [@class_member]/~, path: [ident]/~, &index: [entry]/~) { for items.each {|it| alt ast_util::class_member_visibility(it) { private { cont; } @@ -152,8 +152,8 @@ fn encode_class_item_paths(ebml_w: ebml::writer, } fn encode_module_item_paths(ebml_w: ebml::writer, ecx: @encode_ctxt, - module: _mod, path: [ident], - &index: [entry]) { + module: _mod, path: [ident]/~, + &index: [entry]/~) { for module.items.each 
{|it| if !reachable(ecx, it.id) || !ast_util::is_exported(it.ident, module) { cont; } @@ -170,7 +170,8 @@ fn encode_module_item_paths(ebml_w: ebml::writer, ecx: @encode_ctxt, item_mod(_mod) { ebml_w.wr_tag(tag_paths_data_mod) {|| encode_name_and_def_id(ebml_w, it.ident, it.id); - encode_module_item_paths(ebml_w, ecx, _mod, path + [it.ident], + encode_module_item_paths(ebml_w, ecx, _mod, + path + [it.ident]/~, index); } } @@ -178,7 +179,7 @@ fn encode_module_item_paths(ebml_w: ebml::writer, ecx: @encode_ctxt, ebml_w.wr_tag(tag_paths_data_mod) {|| encode_name_and_def_id(ebml_w, it.ident, it.id); encode_native_module_item_paths(ebml_w, nmod, - path + [it.ident], index); + path + [it.ident]/~, index); } } item_ty(_, tps, _) { @@ -196,7 +197,7 @@ fn encode_module_item_paths(ebml_w: ebml::writer, ecx: @encode_ctxt, add_to_index(ebml_w, path, index, it.ident); encode_named_def_id(ebml_w, it.ident, local_def(ctor.node.id)); - encode_class_item_paths(ebml_w, items, path + [it.ident], + encode_class_item_paths(ebml_w, items, path + [it.ident]/~, index); } } @@ -223,9 +224,9 @@ fn encode_iface_ref(ebml_w: ebml::writer, ecx: @encode_ctxt, t: @iface_ref) { } fn encode_item_paths(ebml_w: ebml::writer, ecx: @encode_ctxt, crate: @crate) - -> [entry] { - let mut index: [entry] = []; - let mut path: [ident] = []; + -> [entry]/~ { + let mut index: [entry]/~ = []/~; + let mut path: [ident]/~ = []/~; ebml_w.start_tag(tag_paths); encode_module_item_paths(ebml_w, ecx, crate.node.module, path, index); encode_reexport_paths(ebml_w, ecx, index); @@ -234,10 +235,10 @@ fn encode_item_paths(ebml_w: ebml::writer, ecx: @encode_ctxt, crate: @crate) } fn encode_reexport_paths(ebml_w: ebml::writer, - ecx: @encode_ctxt, &index: [entry]) { + ecx: @encode_ctxt, &index: [entry]/~) { for ecx.reexports.each {|reexport| let (path, def_id) = reexport; - index += [{val: path, pos: ebml_w.writer.tell()}]; + index += [{val: path, pos: ebml_w.writer.tell()}]/~; ebml_w.start_tag(tag_paths_data_item); encode_name(ebml_w, @path); encode_def_id(ebml_w, def_id); @@ -256,7 +257,7 @@ fn encode_family(ebml_w: ebml::writer, c: char) { fn def_to_str(did: def_id) -> str { ret #fmt["%d:%d", did.crate, did.node]; } fn encode_type_param_bounds(ebml_w: ebml::writer, ecx: @encode_ctxt, - params: [ty_param]) { + params: [ty_param]/~) { let ty_str_ctxt = @{diag: ecx.diag, ds: def_to_str, tcx: ecx.tcx, @@ -324,14 +325,14 @@ fn encode_parent_item(ebml_w: ebml::writer, id: def_id) { } fn encode_enum_variant_info(ecx: @encode_ctxt, ebml_w: ebml::writer, - id: node_id, variants: [variant], - path: ast_map::path, index: @mut [entry], - ty_params: [ty_param]) { + id: node_id, variants: [variant]/~, + path: ast_map::path, index: @mut [entry]/~, + ty_params: [ty_param]/~) { let mut disr_val = 0; let mut i = 0; let vi = ty::enum_variants(ecx.tcx, {crate: local_crate, node: id}); for variants.each {|variant| - *index += [{val: variant.node.id, pos: ebml_w.writer.tell()}]; + *index += [{val: variant.node.id, pos: ebml_w.writer.tell()}]/~; ebml_w.start_tag(tag_items_data_item); encode_def_id(ebml_w, local_def(variant.node.id)); encode_family(ebml_w, 'v'); @@ -420,20 +421,20 @@ fn encode_visibility(ebml_w: ebml::writer, visibility: visibility) { /* Returns an index of items in this class */ fn encode_info_for_class(ecx: @encode_ctxt, ebml_w: ebml::writer, id: node_id, path: ast_map::path, - class_tps: [ty_param], - items: [@class_member], - global_index: @mut[entry]) -> [entry] { + class_tps: [ty_param]/~, + items: [@class_member]/~, + global_index: @mut[entry]/~) 
-> [entry]/~ { /* Each class has its own index, since different classes may have fields with the same name */ - let index = @mut []; + let index = @mut []/~; let tcx = ecx.tcx; for items.each {|ci| /* We encode both private and public fields -- need to include private fields to get the offsets right */ alt ci.node { instance_var(nm, _, mt, id, vis) { - *index += [{val: id, pos: ebml_w.writer.tell()}]; - *global_index += [{val: id, pos: ebml_w.writer.tell()}]; + *index += [{val: id, pos: ebml_w.writer.tell()}]/~; + *global_index += [{val: id, pos: ebml_w.writer.tell()}]/~; ebml_w.start_tag(tag_items_data_item); #debug("encode_info_for_class: doing %s %d", *nm, id); encode_visibility(ebml_w, vis); @@ -447,9 +448,9 @@ fn encode_info_for_class(ecx: @encode_ctxt, ebml_w: ebml::writer, class_method(m) { alt m.vis { public { - *index += [{val: m.id, pos: ebml_w.writer.tell()}]; - *global_index += [{val: m.id, pos: ebml_w.writer.tell()}]; - let impl_path = path + [ast_map::path_name(m.ident)]; + *index += [{val: m.id, pos: ebml_w.writer.tell()}]/~; + *global_index += [{val: m.id, pos: ebml_w.writer.tell()}]/~; + let impl_path = path + [ast_map::path_name(m.ident)]/~; #debug("encode_info_for_class: doing %s %d", *m.ident, m.id); encode_info_for_method(ecx, ebml_w, impl_path, should_inline(m.attrs), id, m, @@ -465,7 +466,7 @@ fn encode_info_for_class(ecx: @encode_ctxt, ebml_w: ebml::writer, fn encode_info_for_fn(ecx: @encode_ctxt, ebml_w: ebml::writer, id: node_id, ident: ident, path: ast_map::path, - item: option, tps: [ty_param], + item: option, tps: [ty_param]/~, decl: fn_decl) { ebml_w.start_tag(tag_items_data_item); encode_name(ebml_w, ident); @@ -491,7 +492,7 @@ fn encode_info_for_fn(ecx: @encode_ctxt, ebml_w: ebml::writer, fn encode_info_for_method(ecx: @encode_ctxt, ebml_w: ebml::writer, impl_path: ast_map::path, should_inline: bool, parent_id: node_id, - m: @method, all_tps: [ty_param]) { + m: @method, all_tps: [ty_param]/~) { #debug("encode_info_for_method: %d %s %u", m.id, *m.ident, all_tps.len()); ebml_w.start_tag(tag_items_data_item); encode_def_id(ebml_w, local_def(m.id)); @@ -520,7 +521,7 @@ fn purity_fn_family(p: purity) -> char { } -fn should_inline(attrs: [attribute]) -> bool { +fn should_inline(attrs: [attribute]/~) -> bool { alt attr::find_inline_attr(attrs) { attr::ia_none { false } attr::ia_hint | attr::ia_always { true } @@ -529,7 +530,7 @@ fn should_inline(attrs: [attribute]) -> bool { fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item, - index: @mut [entry], path: ast_map::path) { + index: @mut [entry]/~, path: ast_map::path) { let tcx = ecx.tcx; let must_write = @@ -537,8 +538,8 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item, if !must_write && !reachable(ecx, item.id) { ret; } fn add_to_index_(item: @item, ebml_w: ebml::writer, - index: @mut [entry]) { - *index += [{val: item.id, pos: ebml_w.writer.tell()}]; + index: @mut [entry]/~) { + *index += [{val: item.id, pos: ebml_w.writer.tell()}]/~; } let add_to_index = {|copy ebml_w|add_to_index_(item, ebml_w, index)}; @@ -620,7 +621,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item, items, index); /* Encode the dtor */ option::iter(m_dtor) {|dtor| - *index += [{val: dtor.node.id, pos: ebml_w.writer.tell()}]; + *index += [{val: dtor.node.id, pos: ebml_w.writer.tell()}]/~; encode_info_for_fn(ecx, ebml_w, dtor.node.id, @(*item.ident + "_dtor"), path, if tps.len() > 0u { some(ii_dtor(dtor, item.ident, tps, @@ -707,9 +708,9 @@ fn 
encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item, encode_path(ebml_w, path, ast_map::path_name(item.ident)); ebml_w.end_tag(); - let impl_path = path + [ast_map::path_name(item.ident)]; + let impl_path = path + [ast_map::path_name(item.ident)]/~; for methods.each {|m| - *index += [{val: m.id, pos: ebml_w.writer.tell()}]; + *index += [{val: m.id, pos: ebml_w.writer.tell()}]/~; encode_info_for_method(ecx, ebml_w, impl_path, should_inline(m.attrs), item.id, m, tps + m.tps); } @@ -741,10 +742,10 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item, fn encode_info_for_native_item(ecx: @encode_ctxt, ebml_w: ebml::writer, nitem: @native_item, - index: @mut [entry], + index: @mut [entry]/~, path: ast_map::path, abi: native_abi) { if !reachable(ecx, nitem.id) { ret; } - *index += [{val: nitem.id, pos: ebml_w.writer.tell()}]; + *index += [{val: nitem.id, pos: ebml_w.writer.tell()}]/~; ebml_w.start_tag(tag_items_data_item); alt nitem.node { @@ -766,12 +767,12 @@ fn encode_info_for_native_item(ecx: @encode_ctxt, ebml_w: ebml::writer, } fn encode_info_for_items(ecx: @encode_ctxt, ebml_w: ebml::writer, - crate: @crate) -> [entry] { - let index = @mut []; + crate: @crate) -> [entry]/~ { + let index = @mut []/~; ebml_w.start_tag(tag_items_data); - *index += [{val: crate_node_id, pos: ebml_w.writer.tell()}]; + *index += [{val: crate_node_id, pos: ebml_w.writer.tell()}]/~; encode_info_for_mod(ecx, ebml_w, crate.node.module, - crate_node_id, [], @""); + crate_node_id, []/~, @""); visit::visit_crate(*crate, (), visit::mk_vt(@{ visit_expr: {|_e, _cx, _v|}, visit_item: {|i, cx, v, copy ebml_w| @@ -784,7 +785,8 @@ fn encode_info_for_items(ecx: @encode_ctxt, ebml_w: ebml::writer, item_class(tps, _, _, ctor, m_dtor, _) { #debug("encoding info for ctor %s %d", *i.ident, ctor.node.id); - *index += [{val: ctor.node.id, pos: ebml_w.writer.tell()}]; + *index += + [{val: ctor.node.id, pos: ebml_w.writer.tell()}]/~; encode_info_for_fn(ecx, ebml_w, ctor.node.id, i.ident, *pt, if tps.len() > 0u { some(ii_ctor(ctor, i.ident, tps, @@ -813,30 +815,30 @@ fn encode_info_for_items(ecx: @encode_ctxt, ebml_w: ebml::writer, // Path and definition ID indexing -fn create_index(index: [entry], hash_fn: fn@(T) -> uint) -> - [@[entry]] { - let mut buckets: [@mut [entry]] = []; - for uint::range(0u, 256u) {|_i| buckets += [@mut []]; }; +fn create_index(index: [entry]/~, hash_fn: fn@(T) -> uint) -> + [@[entry]/~]/~ { + let mut buckets: [@mut [entry]/~]/~ = []/~; + for uint::range(0u, 256u) {|_i| buckets += [@mut []/~]/~; }; for index.each {|elt| let h = hash_fn(elt.val); - *buckets[h % 256u] += [elt]; + vec::push(*buckets[h % 256u], elt); } - let mut buckets_frozen = []; + let mut buckets_frozen = []/~; for buckets.each {|bucket| - buckets_frozen += [@*bucket]; + vec::push(buckets_frozen, @*bucket); } ret buckets_frozen; } -fn encode_index(ebml_w: ebml::writer, buckets: [@[entry]], +fn encode_index(ebml_w: ebml::writer, buckets: [@[entry]/~]/~, write_fn: fn(io::writer, T)) { let writer = ebml_w.writer; ebml_w.start_tag(tag_index); - let mut bucket_locs: [uint] = []; + let mut bucket_locs: [uint]/~ = []/~; ebml_w.start_tag(tag_index_buckets); for buckets.each {|bucket| - bucket_locs += [ebml_w.writer.tell()]; + bucket_locs += [ebml_w.writer.tell()]/~; ebml_w.start_tag(tag_index_buckets_bucket); for vec::each(*bucket) {|elt| ebml_w.start_tag(tag_index_buckets_bucket_elt); @@ -896,7 +898,7 @@ fn encode_meta_item(ebml_w: ebml::writer, mi: meta_item) { } } -fn encode_attributes(ebml_w: 
ebml::writer, attrs: [attribute]) { +fn encode_attributes(ebml_w: ebml::writer, attrs: [attribute]/~) { ebml_w.start_tag(tag_attributes); for attrs.each {|attr| ebml_w.start_tag(tag_attribute); @@ -910,9 +912,9 @@ fn encode_attributes(ebml_w: ebml::writer, attrs: [attribute]) { // metadata that Rust cares about for linking crates. This attribute requires // 'name' and 'vers' items, so if the user didn't provide them we will throw // them in anyway with default values. -fn synthesize_crate_attrs(ecx: @encode_ctxt, crate: @crate) -> [attribute] { +fn synthesize_crate_attrs(ecx: @encode_ctxt, crate: @crate) -> [attribute]/~ { - fn synthesize_link_attr(ecx: @encode_ctxt, items: [@meta_item]) -> + fn synthesize_link_attr(ecx: @encode_ctxt, items: [@meta_item]/~) -> attribute { assert (*ecx.link_meta.name != ""); @@ -929,47 +931,47 @@ fn synthesize_crate_attrs(ecx: @encode_ctxt, crate: @crate) -> [attribute] { attr::remove_meta_items_by_name(tmp, @"vers") }; - let meta_items = [name_item, vers_item] + other_items; + let meta_items = [name_item, vers_item]/~ + other_items; let link_item = attr::mk_list_item(@"link", meta_items); ret attr::mk_attr(link_item); } - let mut attrs: [attribute] = []; + let mut attrs: [attribute]/~ = []/~; let mut found_link_attr = false; for crate.node.attrs.each {|attr| attrs += if *attr::get_attr_name(attr) != "link" { - [attr] + [attr]/~ } else { alt attr.node.value.node { meta_list(n, l) { found_link_attr = true;; - [synthesize_link_attr(ecx, l)] + [synthesize_link_attr(ecx, l)]/~ } - _ { [attr] } + _ { [attr]/~ } } }; } - if !found_link_attr { attrs += [synthesize_link_attr(ecx, [])]; } + if !found_link_attr { attrs += [synthesize_link_attr(ecx, []/~)]/~; } ret attrs; } fn encode_crate_deps(ebml_w: ebml::writer, cstore: cstore::cstore) { - fn get_ordered_deps(cstore: cstore::cstore) -> [decoder::crate_dep] { + fn get_ordered_deps(cstore: cstore::cstore) -> [decoder::crate_dep]/~ { type hashkv = @{key: crate_num, val: cstore::crate_metadata}; type numdep = decoder::crate_dep; // Pull the cnums and name,vers,hash out of cstore - let mut deps: [mut numdep] = [mut]; + let mut deps: [mut numdep]/~ = [mut]/~; cstore::iter_crate_data(cstore) {|key, val| let dep = {cnum: key, name: @val.name, vers: decoder::get_crate_vers(val.data), hash: decoder::get_crate_hash(val.data)}; - deps += [mut dep]; + deps += [mut dep]/~; }; // Sort by cnum @@ -1018,7 +1020,7 @@ fn encode_hash(ebml_w: ebml::writer, hash: str) { ebml_w.end_tag(); } -fn encode_metadata(parms: encode_parms, crate: @crate) -> [u8] { +fn encode_metadata(parms: encode_parms, crate: @crate) -> [u8]/~ { let ecx: @encode_ctxt = @encode_ctxt({ diag: parms.diag, tcx: parms.tcx, diff --git a/src/rustc/metadata/filesearch.rs b/src/rustc/metadata/filesearch.rs index eeb44f80fd7..c66c02bce9c 100644 --- a/src/rustc/metadata/filesearch.rs +++ b/src/rustc/metadata/filesearch.rs @@ -25,29 +25,29 @@ fn pick_file(file: path, path: path) -> option { iface filesearch { fn sysroot() -> path; - fn lib_search_paths() -> [path]; + fn lib_search_paths() -> [path]/~; fn get_target_lib_path() -> path; fn get_target_lib_file_path(file: path) -> path; } fn mk_filesearch(maybe_sysroot: option, target_triple: str, - addl_lib_search_paths: [path]) -> filesearch { + addl_lib_search_paths: [path]/~) -> filesearch { type filesearch_impl = {sysroot: path, - addl_lib_search_paths: [path], + addl_lib_search_paths: [path]/~, target_triple: str}; impl of filesearch for filesearch_impl { fn sysroot() -> path { self.sysroot } - fn lib_search_paths() -> 
[path] { + fn lib_search_paths() -> [path]/~ { self.addl_lib_search_paths - + [make_target_lib_path(self.sysroot, self.target_triple)] + + [make_target_lib_path(self.sysroot, self.target_triple)]/~ + alt get_cargo_lib_path_nearest() { - result::ok(p) { [p] } - result::err(p) { [] } + result::ok(p) { [p]/~ } + result::err(p) { []/~ } } + alt get_cargo_lib_path() { - result::ok(p) { [p] } - result::err(p) { [] } + result::ok(p) { [p]/~ } + result::err(p) { []/~ } } } fn get_target_lib_path() -> path { @@ -85,13 +85,13 @@ fn search(filesearch: filesearch, pick: pick) -> option { ret rslt; } -fn relative_target_lib_path(target_triple: str) -> [path] { - [libdir(), "rustc", target_triple, libdir()] +fn relative_target_lib_path(target_triple: str) -> [path]/~ { + [libdir(), "rustc", target_triple, libdir()]/~ } fn make_target_lib_path(sysroot: path, target_triple: str) -> path { - let path = [sysroot] + relative_target_lib_path(target_triple); + let path = [sysroot]/~ + relative_target_lib_path(target_triple); let path = path::connect_many(path); ret path; } @@ -113,7 +113,7 @@ fn get_sysroot(maybe_sysroot: option) -> path { } fn get_cargo_sysroot() -> result { - let path = [get_default_sysroot(), libdir(), "cargo"]; + let path = [get_default_sysroot(), libdir(), "cargo"]/~; result::ok(path::connect_many(path)) } diff --git a/src/rustc/metadata/loader.rs b/src/rustc/metadata/loader.rs index a5874aa29fa..e7fbdb96d56 100644 --- a/src/rustc/metadata/loader.rs +++ b/src/rustc/metadata/loader.rs @@ -33,13 +33,13 @@ type ctxt = { filesearch: filesearch, span: span, ident: ast::ident, - metas: [@ast::meta_item], + metas: [@ast::meta_item]/~, hash: str, os: os, static: bool }; -fn load_library_crate(cx: ctxt) -> {ident: str, data: @[u8]} { +fn load_library_crate(cx: ctxt) -> {ident: str, data: @[u8]/~} { alt find_library_crate(cx) { some(t) { ret t; } none { @@ -49,7 +49,7 @@ fn load_library_crate(cx: ctxt) -> {ident: str, data: @[u8]} { } } -fn find_library_crate(cx: ctxt) -> option<{ident: str, data: @[u8]}> { +fn find_library_crate(cx: ctxt) -> option<{ident: str, data: @[u8]/~}> { attr::require_unique_names(cx.diag, cx.metas); find_library_crate_aux(cx, libname(cx), cx.filesearch) } @@ -67,12 +67,12 @@ fn libname(cx: ctxt) -> {prefix: str, suffix: str} { fn find_library_crate_aux(cx: ctxt, nn: {prefix: str, suffix: str}, filesearch: filesearch::filesearch) -> - option<{ident: str, data: @[u8]}> { + option<{ident: str, data: @[u8]/~}> { let crate_name = crate_name_from_metas(cx.metas); let prefix: str = nn.prefix + *crate_name + "-"; let suffix: str = nn.suffix; - let mut matches = []; + let mut matches = []/~; filesearch::search(filesearch, { |path| #debug("inspecting file %s", path); let f: str = path::basename(path); @@ -89,7 +89,7 @@ fn find_library_crate_aux(cx: ctxt, option::none::<()> } else { #debug("found %s with matching metadata", path); - matches += [{ident: path, data: cvec}]; + matches += [{ident: path, data: cvec}]/~; option::none::<()> } } @@ -119,7 +119,7 @@ fn find_library_crate_aux(cx: ctxt, } } -fn crate_name_from_metas(metas: [@ast::meta_item]) -> @str { +fn crate_name_from_metas(metas: [@ast::meta_item]/~) -> @str { let name_items = attr::find_meta_items_by_name(metas, "name"); alt vec::last_opt(name_items) { some(i) { @@ -134,14 +134,14 @@ fn crate_name_from_metas(metas: [@ast::meta_item]) -> @str { } } -fn note_linkage_attrs(diag: span_handler, attrs: [ast::attribute]) { +fn note_linkage_attrs(diag: span_handler, attrs: [ast::attribute]/~) { for 
attr::find_linkage_attrs(attrs).each {|attr| diag.handler().note(#fmt("meta: %s", pprust::attr_to_str(attr))); } } -fn crate_matches(crate_data: @[u8], metas: [@ast::meta_item], hash: str) -> - bool { +fn crate_matches(crate_data: @[u8]/~, metas: [@ast::meta_item]/~, + hash: str) -> bool { let attrs = decoder::get_crate_attributes(crate_data); let linkage_metas = attr::find_linkage_metas(attrs); if hash.is_not_empty() { @@ -151,8 +151,8 @@ fn crate_matches(crate_data: @[u8], metas: [@ast::meta_item], hash: str) -> metadata_matches(linkage_metas, metas) } -fn metadata_matches(extern_metas: [@ast::meta_item], - local_metas: [@ast::meta_item]) -> bool { +fn metadata_matches(extern_metas: [@ast::meta_item]/~, + local_metas: [@ast::meta_item]/~) -> bool { #debug("matching %u metadata requirements against %u items", vec::len(local_metas), vec::len(extern_metas)); @@ -173,14 +173,14 @@ fn metadata_matches(extern_metas: [@ast::meta_item], } fn get_metadata_section(os: os, - filename: str) -> option<@[u8]> unsafe { + filename: str) -> option<@[u8]/~> unsafe { let mb = str::as_c_str(filename, {|buf| llvm::LLVMRustCreateMemoryBufferWithContentsOfFile(buf) }); - if mb as int == 0 { ret option::none::<@[u8]>; } + if mb as int == 0 { ret option::none::<@[u8]/~>; } let of = alt mk_object_file(mb) { option::some(of) { of } - _ { ret option::none::<@[u8]>; } + _ { ret option::none::<@[u8]/~>; } }; let si = mk_section_iter(of.llof); while llvm::LLVMIsSectionIteratorAtEnd(of.llof, si.llsi) == False { @@ -196,7 +196,7 @@ fn get_metadata_section(os: os, } llvm::LLVMMoveToNextSection(si.llsi); } - ret option::none::<@[u8]>; + ret option::none::<@[u8]/~>; } fn meta_section_name(os: os) -> str { diff --git a/src/rustc/metadata/tydecode.rs b/src/rustc/metadata/tydecode.rs index 794af5577e7..28eddf2b238 100644 --- a/src/rustc/metadata/tydecode.rs +++ b/src/rustc/metadata/tydecode.rs @@ -17,7 +17,7 @@ export parse_bounds_data; // Callback to translate defs to strs or back: type conv_did = fn(ast::def_id) -> ast::def_id; -type pstate = {data: @[u8], crate: int, mut pos: uint, tcx: ty::ctxt}; +type pstate = {data: @[u8]/~, crate: int, mut pos: uint, tcx: ty::ctxt}; fn peek(st: @pstate) -> char { st.data[st.pos] as char @@ -50,7 +50,7 @@ fn parse_ident_(st: @pstate, is_last: fn@(char) -> bool) -> } -fn parse_ty_data(data: @[u8], crate_num: int, pos: uint, tcx: ty::ctxt, +fn parse_ty_data(data: @[u8]/~, crate_num: int, pos: uint, tcx: ty::ctxt, conv: conv_did) -> ty::t { let st = @{data: data, crate: crate_num, mut pos: pos, tcx: tcx}; parse_ty(st, conv) @@ -65,13 +65,13 @@ fn parse_ret_ty(st: @pstate, conv: conv_did) -> (ast::ret_style, ty::t) { fn parse_constrs_gen(st: @pstate, conv: conv_did, pser: fn(@pstate) - -> ast::constr_arg_general_) -> [@ty::constr_general] { - let mut rslt: [@ty::constr_general] = []; + -> ast::constr_arg_general_) -> [@ty::constr_general]/~ { + let mut rslt: [@ty::constr_general]/~ = []/~; alt peek(st) { ':' { loop { next(st); - rslt += [parse_constr(st, conv, pser)]; + rslt += [parse_constr(st, conv, pser)]/~; if peek(st) != ';' { break; } } } @@ -80,18 +80,18 @@ fn parse_constrs_gen(st: @pstate, conv: conv_did, rslt } -fn parse_constrs(st: @pstate, conv: conv_did) -> [@ty::constr] { +fn parse_constrs(st: @pstate, conv: conv_did) -> [@ty::constr]/~ { parse_constrs_gen(st, conv, parse_constr_arg) } -fn parse_ty_constrs(st: @pstate, conv: conv_did) -> [@ty::type_constr] { +fn parse_ty_constrs(st: @pstate, conv: conv_did) -> [@ty::type_constr]/~ { parse_constrs_gen(st, conv, 
parse_ty_constr_arg) } fn parse_path(st: @pstate) -> @ast::path { - let mut idents: [ast::ident] = []; + let mut idents: [ast::ident]/~ = []/~; fn is_last(c: char) -> bool { ret c == '(' || c == ':'; } - idents += [parse_ident_(st, is_last)]; + idents += [parse_ident_(st, is_last)]/~; loop { alt peek(st) { ':' { next(st); next(st); } @@ -99,8 +99,8 @@ fn parse_path(st: @pstate) -> @ast::path { if c == '(' { ret @{span: ast_util::dummy_sp(), global: false, idents: idents, - rp: none, types: []}; - } else { idents += [parse_ident_(st, is_last)]; } + rp: none, types: []/~}; + } else { idents += [parse_ident_(st, is_last)]/~; } } } }; @@ -124,7 +124,7 @@ fn parse_constr_arg(st: @pstate) -> ast::fn_constr_arg { /* else { auto lit = parse_lit(st, conv, ','); - args += [respan(st.span, ast::carg_lit(lit))]; + args += [respan(st.span, ast::carg_lit(lit))]/~; } */ } @@ -143,7 +143,7 @@ fn parse_constr(st: @pstate, conv: conv_did, -> @ty::constr_general { // FIXME: use real spans and not a bogus one (#2407) let sp = ast_util::dummy_sp(); - let mut args: [@sp_constr_arg] = []; + let mut args: [@sp_constr_arg]/~ = []/~; let pth = parse_path(st); let mut ignore: char = next(st); assert (ignore == '('); @@ -151,7 +151,7 @@ fn parse_constr(st: @pstate, conv: conv_did, let mut an_arg: constr_arg_general_; loop { an_arg = pser(st); - args += [@respan(sp, an_arg)]; + args += [@respan(sp, an_arg)]/~; ignore = next(st); if ignore != ';' { break; } } @@ -197,8 +197,8 @@ fn parse_substs(st: @pstate, conv: conv_did) -> ty::substs { let self_ty = parse_opt(st) {|| parse_ty(st, conv) }; assert next(st) == '['; - let mut params: [ty::t] = []; - while peek(st) != ']' { params += [parse_ty(st, conv)]; } + let mut params: [ty::t]/~ = []/~; + while peek(st) != ']' { params += [parse_ty(st, conv)]/~; } st.pos = st.pos + 1u; ret {self_r: self_r, @@ -320,18 +320,18 @@ fn parse_ty(st: @pstate, conv: conv_did) -> ty::t { } 'R' { assert (next(st) == '['); - let mut fields: [ty::field] = []; + let mut fields: [ty::field]/~ = []/~; while peek(st) != ']' { let name = @parse_str(st, '='); - fields += [{ident: name, mt: parse_mt(st, conv)}]; + fields += [{ident: name, mt: parse_mt(st, conv)}]/~; } st.pos = st.pos + 1u; ret ty::mk_rec(st.tcx, fields); } 'T' { assert (next(st) == '['); - let mut params = []; - while peek(st) != ']' { params += [parse_ty(st, conv)]; } + let mut params = []/~; + while peek(st) != ']' { params += [parse_ty(st, conv)]/~; } st.pos = st.pos + 1u; ret ty::mk_tup(st.tcx, params); } @@ -403,8 +403,8 @@ fn parse_mt(st: @pstate, conv: conv_did) -> ty::mt { } fn parse_def(st: @pstate, conv: conv_did) -> ast::def_id { - let mut def = []; - while peek(st) != '|' { def += [next_byte(st)]; } + let mut def = []/~; + while peek(st) != '|' { def += [next_byte(st)]/~; } st.pos = st.pos + 1u; ret conv(parse_def_id(def)); } @@ -446,7 +446,7 @@ fn parse_ty_fn(st: @pstate, conv: conv_did) -> ty::fn_ty { let proto = parse_proto(next(st)); let purity = parse_purity(next(st)); assert (next(st) == '['); - let mut inputs: [ty::arg] = []; + let mut inputs: [ty::arg]/~ = []/~; while peek(st) != ']' { let mode = alt check peek(st) { '&' { ast::by_mutbl_ref } @@ -456,7 +456,7 @@ fn parse_ty_fn(st: @pstate, conv: conv_did) -> ty::fn_ty { '#' { ast::by_val } }; st.pos += 1u; - inputs += [{mode: ast::expl(mode), ty: parse_ty(st, conv)}]; + inputs += [{mode: ast::expl(mode), ty: parse_ty(st, conv)}]/~; } st.pos += 1u; // eat the ']' let cs = parse_constrs(st, conv); @@ -467,7 +467,7 @@ fn parse_ty_fn(st: @pstate, conv: conv_did) -> 
ty::fn_ty { // Rust metadata parsing -fn parse_def_id(buf: [u8]) -> ast::def_id { +fn parse_def_id(buf: [u8]/~) -> ast::def_id { let mut colon_idx = 0u; let len = vec::len(buf); while colon_idx < len && buf[colon_idx] != ':' as u8 { colon_idx += 1u; } @@ -491,15 +491,15 @@ fn parse_def_id(buf: [u8]) -> ast::def_id { ret {crate: crate_num, node: def_num}; } -fn parse_bounds_data(data: @[u8], start: uint, +fn parse_bounds_data(data: @[u8]/~, start: uint, crate_num: int, tcx: ty::ctxt, conv: conv_did) - -> @[ty::param_bound] { + -> @[ty::param_bound]/~ { let st = @{data: data, crate: crate_num, mut pos: start, tcx: tcx}; parse_bounds(st, conv) } -fn parse_bounds(st: @pstate, conv: conv_did) -> @[ty::param_bound] { - let mut bounds = []; +fn parse_bounds(st: @pstate, conv: conv_did) -> @[ty::param_bound]/~ { + let mut bounds = []/~; loop { bounds += [alt check next(st) { 'S' { ty::bound_send } @@ -507,7 +507,7 @@ fn parse_bounds(st: @pstate, conv: conv_did) -> @[ty::param_bound] { 'K' { ty::bound_const } 'I' { ty::bound_iface(parse_ty(st, conv)) } '.' { break; } - }]; + }]/~; } @bounds } diff --git a/src/rustc/metadata/tyencode.rs b/src/rustc/metadata/tyencode.rs index dc1cbac434b..3f7c6ee648c 100644 --- a/src/rustc/metadata/tyencode.rs +++ b/src/rustc/metadata/tyencode.rs @@ -395,7 +395,7 @@ fn enc_ty_constr(w: io::writer, cx: @ctxt, c: @ty::type_constr) { }); } -fn enc_bounds(w: io::writer, cx: @ctxt, bs: @[ty::param_bound]) { +fn enc_bounds(w: io::writer, cx: @ctxt, bs: @[ty::param_bound]/~) { for vec::each(*bs) {|bound| alt bound { ty::bound_send { w.write_char('S'); } diff --git a/src/rustc/middle/astencode.rs b/src/rustc/middle/astencode.rs index 2fae0850187..e463f371bf5 100644 --- a/src/rustc/middle/astencode.rs +++ b/src/rustc/middle/astencode.rs @@ -540,7 +540,7 @@ impl helpers for ebml::writer { e::write_type(ecx, self, ty) } - fn emit_tys(ecx: @e::encode_ctxt, tys: [ty::t]) { + fn emit_tys(ecx: @e::encode_ctxt, tys: [ty::t]/~) { self.emit_from_vec(tys) {|ty| e::write_type(ecx, self, ty) } @@ -741,11 +741,11 @@ impl decoder for ebml::ebml_deserializer { {|a|xcx.tr_def_id(a)}) } - fn read_tys(xcx: extended_decode_ctxt) -> [ty::t] { + fn read_tys(xcx: extended_decode_ctxt) -> [ty::t]/~ { self.read_to_vec {|| self.read_ty(xcx) } } - fn read_bounds(xcx: extended_decode_ctxt) -> @[ty::param_bound] { + fn read_bounds(xcx: extended_decode_ctxt) -> @[ty::param_bound]/~ { tydecode::parse_bounds_data( self.parent.data, self.pos, xcx.dcx.cdata.cnum, xcx.dcx.tcx, {|a|xcx.tr_def_id(a)}) @@ -861,7 +861,7 @@ type fake_session = (); #[cfg(test)] impl of fake_ext_ctxt for fake_session { - fn cfg() -> ast::crate_cfg { [] } + fn cfg() -> ast::crate_cfg { []/~ } fn parse_sess() -> parse::parse_sess { parse::new_parse_sess(none) } } @@ -924,13 +924,13 @@ fn test_simplification() { let item_in = ast::ii_item(#ast(item) { fn new_int_alist() -> alist { fn eq_int(&&a: int, &&b: int) -> bool { a == b } - ret {eq_fn: eq_int, mut data: []}; + ret {eq_fn: eq_int, mut data: []/~}; } }); let item_out = simplify_ast(item_in); let item_exp = ast::ii_item(#ast(item) { fn new_int_alist() -> alist { - ret {eq_fn: eq_int, mut data: []}; + ret {eq_fn: eq_int, mut data: []/~}; } }); alt (item_out, item_exp) { diff --git a/src/rustc/middle/borrowck/check_loans.rs b/src/rustc/middle/borrowck/check_loans.rs index c1a72dff3ac..af8e1860e53 100644 --- a/src/rustc/middle/borrowck/check_loans.rs +++ b/src/rustc/middle/borrowck/check_loans.rs @@ -23,7 +23,7 @@ enum check_loan_ctxt = @{ // we are in a ctor, we track the self id mut 
in_ctor: bool, mut declared_purity: ast::purity, - mut fn_args: @[ast::node_id] + mut fn_args: @[ast::node_id]/~ }; // if we are enforcing purity, why are we doing so? @@ -45,7 +45,7 @@ fn check_loans(bccx: borrowck_ctxt, reported: int_hash(), mut in_ctor: false, mut declared_purity: ast::impure_fn, - mut fn_args: @[]}); + mut fn_args: @[]/~}); let vt = visit::mk_vt(@{visit_expr: check_loans_in_expr, visit_local: check_loans_in_local, visit_block: check_loans_in_block, @@ -473,7 +473,7 @@ impl methods for check_loan_ctxt { callee: option<@ast::expr>, callee_id: ast::node_id, callee_span: span, - args: [@ast::expr]) { + args: [@ast::expr]/~) { alt self.purity(expr.id) { none {} some(pc) { @@ -618,7 +618,7 @@ fn check_loans_in_expr(expr: @ast::expr, none, ast_util::op_expr_callee_id(expr), expr.span, - [rval]); + [rval]/~); } ast::expr_unary(*) | ast::expr_index(*) if self.bccx.method_map.contains_key(expr.id) { @@ -626,7 +626,7 @@ fn check_loans_in_expr(expr: @ast::expr, none, ast_util::op_expr_callee_id(expr), expr.span, - []); + []/~); } _ { } } diff --git a/src/rustc/middle/borrowck/gather_loans.rs b/src/rustc/middle/borrowck/gather_loans.rs index df46a4cef8f..5c4b804164c 100644 --- a/src/rustc/middle/borrowck/gather_loans.rs +++ b/src/rustc/middle/borrowck/gather_loans.rs @@ -86,12 +86,11 @@ fn req_loans_in_expr(ex: @ast::expr, // passing the buck onto us to enforce this) // // FIXME (#2493): this handling is not really adequate. - // For example, if there is a type like, {f: [int]}, we + // For example, if there is a type like, {f: [int]/~}, we // will ignore it, but we ought to be requiring it to be // immutable (whereas something like {f:int} would be // fine). // - alt opt_deref_kind(arg_ty.ty) { some(deref_ptr(region_ptr)) | some(deref_ptr(unsafe_ptr)) { @@ -281,7 +280,7 @@ impl methods for gather_loan_ctxt { } none { self.req_maps.req_loan_map.insert( - scope_id, @dvec::from_vec([mut loans])); + scope_id, @dvec::from_vec([mut loans]/~)); } } } diff --git a/src/rustc/middle/capture.rs b/src/rustc/middle/capture.rs index fe7aeb8e5f0..07bf22aedce 100644 --- a/src/rustc/middle/capture.rs +++ b/src/rustc/middle/capture.rs @@ -60,7 +60,7 @@ fn check_capture_clause(tcx: ty::ctxt, fn compute_capture_vars(tcx: ty::ctxt, fn_expr_id: ast::node_id, fn_proto: ast::proto, - cap_clause: ast::capture_clause) -> [capture_var] { + cap_clause: ast::capture_clause) -> [capture_var]/~ { let freevars = freevars::get_freevars(tcx, fn_expr_id); let cap_map = map::int_hash(); @@ -119,7 +119,7 @@ fn compute_capture_vars(tcx: ty::ctxt, } } - let mut result = []; - for cap_map.each_value { |cap_var| result += [cap_var]; } + let mut result = []/~; + for cap_map.each_value { |cap_var| result += [cap_var]/~; } ret result; } diff --git a/src/rustc/middle/check_alt.rs b/src/rustc/middle/check_alt.rs index 20bbce07d75..e9681c2d8cf 100644 --- a/src/rustc/middle/check_alt.rs +++ b/src/rustc/middle/check_alt.rs @@ -36,18 +36,18 @@ fn check_expr(tcx: ty::ctxt, ex: @expr, &&s: (), v: visit::vt<()>) { } // Check for unreachable patterns -fn check_arms(tcx: ty::ctxt, arms: [arm]) { - let mut seen = []; +fn check_arms(tcx: ty::ctxt, arms: [arm]/~) { + let mut seen = []/~; for arms.each {|arm| for arm.pats.each {|pat| - let v = [pat]; + let v = [pat]/~; alt is_useful(tcx, seen, v) { not_useful { tcx.sess.span_err(pat.span, "unreachable pattern"); } _ {} } - if option::is_none(arm.guard) { seen += [v]; } + if option::is_none(arm.guard) { seen += [v]/~; } } } } @@ -59,8 +59,8 @@ fn raw_pat(p: @pat) -> @pat { } } -fn 
check_exhaustive(tcx: ty::ctxt, sp: span, pats: [@pat]) { - let ext = alt is_useful(tcx, vec::map(pats, {|p| [p]}), [wild()]) { +fn check_exhaustive(tcx: ty::ctxt, sp: span, pats: [@pat]/~) { + let ext = alt is_useful(tcx, vec::map(pats, {|p| [p]/~}), [wild()]/~) { not_useful { ret; } // This is good, wildcard pattern isn't reachable useful_ { none } useful(ty, ctor) { @@ -89,7 +89,7 @@ fn check_exhaustive(tcx: ty::ctxt, sp: span, pats: [@pat]) { tcx.sess.span_err(sp, msg); } -type matrix = [[@pat]]; +type matrix = [[@pat]/~]/~; enum useful { useful(ty::t, ctor), useful_, not_useful } @@ -111,7 +111,7 @@ enum ctor { // checking (if a wildcard pattern is useful in relation to a matrix, the // matrix isn't exhaustive). -fn is_useful(tcx: ty::ctxt, m: matrix, v: [@pat]) -> useful { +fn is_useful(tcx: ty::ctxt, m: matrix, v: [@pat]/~) -> useful { if m.len() == 0u { ret useful_; } if m[0].len() == 0u { ret not_useful; } let real_pat = alt vec::find(m, {|r| r[0].id != 0}) { @@ -167,7 +167,7 @@ fn is_useful(tcx: ty::ctxt, m: matrix, v: [@pat]) -> useful { } } -fn is_useful_specialized(tcx: ty::ctxt, m: matrix, v: [@pat], ctor: ctor, +fn is_useful_specialized(tcx: ty::ctxt, m: matrix, v: [@pat]/~, ctor: ctor, arity: uint, lty: ty::t) -> useful { let ms = vec::filter_map(m, {|r| specialize(tcx, r, ctor, arity, lty)}); alt is_useful(tcx, ms, option::get(specialize(tcx, v, ctor, arity, lty))){ @@ -217,10 +217,10 @@ fn missing_ctor(tcx: ty::ctxt, m: matrix, left_ty: ty::t) -> option { ret some(single); } ty::ty_enum(eid, _) { - let mut found = []; + let mut found = []/~; for m.each {|r| option::iter(pat_ctor_id(tcx, r[0])) {|id| - if !vec::contains(found, id) { found += [id]; } + if !vec::contains(found, id) { found += [id]/~; } } } let variants = ty::enum_variants(tcx, eid); @@ -270,8 +270,8 @@ fn wild() -> @pat { @{id: 0, node: pat_wild, span: syntax::ast_util::dummy_sp()} } -fn specialize(tcx: ty::ctxt, r: [@pat], ctor_id: ctor, arity: uint, - left_ty: ty::t) -> option<[@pat]> { +fn specialize(tcx: ty::ctxt, r: [@pat]/~, ctor_id: ctor, arity: uint, + left_ty: ty::t) -> option<[@pat]/~> { let r0 = raw_pat(r[0]); alt r0.node { pat_wild { some(vec::from_elem(arity, wild()) + vec::tail(r)) } @@ -308,7 +308,7 @@ fn specialize(tcx: ty::ctxt, r: [@pat], ctor_id: ctor, arity: uint, some(args + vec::tail(r)) } pat_tup(args) { some(args + vec::tail(r)) } - pat_box(a) | pat_uniq(a) { some([a] + vec::tail(r)) } + pat_box(a) | pat_uniq(a) { some([a]/~ + vec::tail(r)) } pat_lit(expr) { let e_v = eval_const_expr(tcx, expr); let match = alt check ctor_id { @@ -334,7 +334,7 @@ fn specialize(tcx: ty::ctxt, r: [@pat], ctor_id: ctor, arity: uint, } } -fn default(tcx: ty::ctxt, r: [@pat]) -> option<[@pat]> { +fn default(tcx: ty::ctxt, r: [@pat]/~) -> option<[@pat]/~> { if is_wild(tcx, r[0]) { some(vec::tail(r)) } else { none } } diff --git a/src/rustc/middle/freevars.rs b/src/rustc/middle/freevars.rs index df6a718de1d..bbfc16b64b2 100644 --- a/src/rustc/middle/freevars.rs +++ b/src/rustc/middle/freevars.rs @@ -23,7 +23,7 @@ type freevar_entry = { def: ast::def, //< The variable being accessed free. 
span: span //< First span where it is accessed (there can be multiple) }; -type freevar_info = @[@freevar_entry]; +type freevar_info = @[@freevar_entry]/~; type freevar_map = hashmap; // Searches through part of the AST for all references to locals or @@ -34,7 +34,7 @@ type freevar_map = hashmap; fn collect_freevars(def_map: resolve::def_map, blk: ast::blk) -> freevar_info { let seen = int_hash(); - let refs = @mut []; + let refs = @mut []/~; fn ignore_item(_i: @ast::item, &&_depth: int, _v: visit::vt) { } @@ -64,7 +64,7 @@ fn collect_freevars(def_map: resolve::def_map, blk: ast::blk) if i == depth { // Made it to end of loop let dnum = ast_util::def_id_of_def(def).node; if !seen.contains_key(dnum) { - *refs += [@{def:def, span:expr.span}]; + *refs += [@{def:def, span:expr.span}]/~; seen.insert(dnum, ()); } } diff --git a/src/rustc/middle/kind.rs b/src/rustc/middle/kind.rs index 465799c850c..b34880de3f2 100644 --- a/src/rustc/middle/kind.rs +++ b/src/rustc/middle/kind.rs @@ -40,15 +40,15 @@ import lint::{non_implicitly_copyable_typarams,implicit_copies}; // types. fn kind_to_str(k: kind) -> str { - let mut kinds = []; + let mut kinds = []/~; if ty::kind_lteq(kind_const(), k) { - kinds += ["const"]; + vec::push(kinds, "const"); } if ty::kind_can_be_copied(k) { - kinds += ["copy"]; + vec::push(kinds, "copy"); } if ty::kind_can_be_sent(k) { - kinds += ["send"]; + vec::push(kinds, "send"); } str::connect(kinds, " ") } @@ -160,7 +160,7 @@ fn check_fn(fk: visit::fn_kind, decl: fn_decl, body: blk, sp: span, let cap_clause = alt fk { visit::fk_anon(_, cc) | visit::fk_fn_block(cc) { cc } visit::fk_item_fn(*) | visit::fk_method(*) | - visit::fk_ctor(*) | visit::fk_dtor(*) { @[] } + visit::fk_ctor(*) | visit::fk_dtor(*) { @[]/~ } }; let captured_vars = (*cap_clause).map { |cap_item| let cap_def = cx.tcx.def_map.get(cap_item.id); diff --git a/src/rustc/middle/lint.rs b/src/rustc/middle/lint.rs index 0561783f2f6..3c93bf5f5c5 100644 --- a/src/rustc/middle/lint.rs +++ b/src/rustc/middle/lint.rs @@ -134,7 +134,7 @@ fn get_lint_dict() -> lint_dict { desc: "implicit copies of non implicitly copyable data", default: warn}) - ]; + ]/~; hash_from_strs(v) } @@ -206,7 +206,7 @@ impl methods for ctxt { current lint context, call the provided function, then reset the warnings in effect to their previous state. "] - fn with_warn_attrs(attrs: [ast::attribute], f: fn(ctxt)) { + fn with_warn_attrs(attrs: [ast::attribute]/~, f: fn(ctxt)) { let mut new_ctxt = self; @@ -358,7 +358,7 @@ fn check_item_ctypes(cx: ty::ctxt, it: @ast::item) { fn check_native_fn(cx: ty::ctxt, fn_id: ast::node_id, decl: ast::fn_decl) { let tys = vec::map(decl.inputs) {|a| a.ty }; - for vec::each(tys + [decl.output]) {|ty| + for vec::each(tys + [decl.output]/~) {|ty| alt ty.node { ast::ty_path(_, id) { alt cx.def_map.get(id) { @@ -456,7 +456,7 @@ fn check_item_old_vecs(cx: ty::ctxt, it: @ast::item) { } ast::ty_path(@{span: _, global: _, idents: ids, rp: none, types: _}, _) - if ids == [@"str"] && (! uses_vstore.contains_key(t.id)) { + if ids == [@"str"]/~ && (! 
uses_vstore.contains_key(t.id)) { cx.sess.span_lint( old_strs, t.id, it.id, t.span, "deprecated str type"); diff --git a/src/rustc/middle/liveness.rs b/src/rustc/middle/liveness.rs index 9ad4009c513..836dced30c4 100644 --- a/src/rustc/middle/liveness.rs +++ b/src/rustc/middle/liveness.rs @@ -220,9 +220,9 @@ class ir_maps { let live_node_map: hashmap; let variable_map: hashmap; let field_map: hashmap; - let capture_map: hashmap; - let mut var_kinds: [var_kind]; - let mut lnks: [live_node_kind]; + let capture_map: hashmap; + let mut var_kinds: [var_kind]/~; + let mut lnks: [live_node_kind]/~; new(tcx: ty::ctxt, method_map: typeck::method_map, last_use_map: last_use_map) { @@ -236,13 +236,13 @@ class ir_maps { self.variable_map = int_hash(); self.capture_map = int_hash(); self.field_map = box_str_hash(); - self.var_kinds = []; - self.lnks = []; + self.var_kinds = []/~; + self.lnks = []/~; } fn add_live_node(lnk: live_node_kind) -> live_node { let ln = live_node(self.num_live_nodes); - self.lnks += [lnk]; + self.lnks += [lnk]/~; self.num_live_nodes += 1u; #debug["%s is of kind %?", ln.to_str(), lnk]; @@ -259,7 +259,7 @@ class ir_maps { fn add_variable(vk: var_kind) -> variable { let v = variable(self.num_vars); - self.var_kinds += [vk]; + self.var_kinds += [vk]/~; self.num_vars += 1u; alt vk { @@ -297,11 +297,11 @@ class ir_maps { } } - fn set_captures(node_id: node_id, +cs: [capture_info]) { + fn set_captures(node_id: node_id, +cs: [capture_info]/~) { self.capture_map.insert(node_id, @cs); } - fn captures(expr: @expr) -> @[capture_info] { + fn captures(expr: @expr) -> @[capture_info]/~ { alt self.capture_map.find(expr.id) { some(caps) {caps} none { @@ -435,7 +435,7 @@ fn visit_expr(expr: @expr, &&self: @ir_maps, vt: vt<@ir_maps>) { let proto = ty::ty_fn_proto(ty::expr_ty(self.tcx, expr)); let cvs = capture::compute_capture_vars(self.tcx, expr.id, proto, cap_clause); - let mut call_caps = []; + let mut call_caps = []/~; for cvs.each { |cv| alt relevant_def(cv.def) { some(rv) { @@ -444,7 +444,7 @@ fn visit_expr(expr: @expr, &&self: @ir_maps, vt: vt<@ir_maps>) { cap_move | cap_drop {true} // var must be dead afterwards cap_copy | cap_ref {false} // var can still be used }; - call_caps += [{ln: cv_ln, is_move: is_move, rv: rv}]; + call_caps += [{ln: cv_ln, is_move: is_move, rv: rv}]/~; } none {} } @@ -511,8 +511,8 @@ class liveness { let tcx: ty::ctxt; let ir: @ir_maps; let s: specials; - let successors: [mut live_node]; - let users: [mut users]; + let successors: [mut live_node]/~; + let users: [mut users]/~; let mut break_ln: live_node; let mut cont_ln: live_node; @@ -887,7 +887,8 @@ class liveness { succ } - fn propagate_through_exprs(exprs: [@expr], succ: live_node) -> live_node { + fn propagate_through_exprs(exprs: [@expr]/~, + succ: live_node) -> live_node { exprs.foldr(succ) { |expr, succ| self.propagate_through_expr(expr, succ) } @@ -1095,7 +1096,7 @@ class liveness { expr_log(_, l, r) | expr_index(l, r) | expr_binary(_, l, r) { - self.propagate_through_exprs([l, r], succ) + self.propagate_through_exprs([l, r]/~, succ) } expr_assert(e) | diff --git a/src/rustc/middle/pat_util.rs b/src/rustc/middle/pat_util.rs index 706a6fb2264..fa5ab8fc5c5 100644 --- a/src/rustc/middle/pat_util.rs +++ b/src/rustc/middle/pat_util.rs @@ -49,8 +49,8 @@ fn pat_bindings(dm: resolve::def_map, pat: @pat, } } -fn pat_binding_ids(dm: resolve::def_map, pat: @pat) -> [node_id] { - let mut found = []; - pat_bindings(dm, pat) {|b_id, _sp, _pt| found += [b_id]; }; +fn pat_binding_ids(dm: resolve::def_map, pat: @pat) -> 
[node_id]/~ { + let mut found = []/~; + pat_bindings(dm, pat) {|b_id, _sp, _pt| found += [b_id]/~; }; ret found; } diff --git a/src/rustc/middle/region.rs b/src/rustc/middle/region.rs index e17343a0b9e..6b79b3c6aa8 100644 --- a/src/rustc/middle/region.rs +++ b/src/rustc/middle/region.rs @@ -214,14 +214,14 @@ fn nearest_common_ancestor(region_map: region_map, scope_a: ast::node_id, scope_b: ast::node_id) -> option { fn ancestors_of(region_map: region_map, scope: ast::node_id) - -> [ast::node_id] { - let mut result = [scope]; + -> [ast::node_id]/~ { + let mut result = [scope]/~; let mut scope = scope; loop { alt region_map.find(scope) { none { ret result; } some(superscope) { - result += [superscope]; + result += [superscope]/~; scope = superscope; } } @@ -235,7 +235,7 @@ fn nearest_common_ancestor(region_map: region_map, scope_a: ast::node_id, let mut a_index = vec::len(a_ancestors) - 1u; let mut b_index = vec::len(b_ancestors) - 1u; - // Here, [ab]_ancestors is a vector going from narrow to broad. + // Here, [ab]/~_ancestors is a vector going from narrow to broad. // The end of each vector will be the item where the scope is // defined; if there are any common ancestors, then the tails of // the vector will be the same. So basically we want to walk diff --git a/src/rustc/middle/resolve.rs b/src/rustc/middle/resolve.rs index b693d83e193..340f05e4616 100644 --- a/src/rustc/middle/resolve.rs +++ b/src/rustc/middle/resolve.rs @@ -34,13 +34,13 @@ enum scope { scope_toplevel, scope_crate, scope_item(@ast::item), - scope_bare_fn(ast::fn_decl, node_id, [ast::ty_param]), - scope_fn_expr(ast::fn_decl, node_id, [ast::ty_param]), + scope_bare_fn(ast::fn_decl, node_id, [ast::ty_param]/~), + scope_fn_expr(ast::fn_decl, node_id, [ast::ty_param]/~), scope_native_item(@ast::native_item), scope_loop(@ast::local), // there's only 1 decl per loop. 
scope_block(ast::blk, @mut uint, @mut uint), scope_arm(ast::arm), - scope_method(node_id, [ast::ty_param]), + scope_method(node_id, [ast::ty_param]/~), } type scopes = @list; @@ -50,13 +50,13 @@ fn top_scope() -> scopes { } enum import_state { - todo(ast::ident, @[ast::ident], span, scopes), - is_glob(@[ast::ident], scopes, span), + todo(ast::ident, @[ast::ident]/~, span, scopes), + is_glob(@[ast::ident]/~, scopes, span), resolving(span), resolved(option, /* value */ option, /* type */ option, /* module */ - @[@_impl], /* impls */ + @[@_impl]/~, /* impls */ /* used for reporting unused import warning */ ast::ident, span), } @@ -88,7 +88,7 @@ enum mod_index_entry { mie_item(@ast::item), mie_native_item(@ast::native_item), mie_enum_variant(/* variant index */uint, - /*parts of enum item*/ [variant], + /*parts of enum item*/ [variant]/~, node_id, span), } @@ -101,7 +101,7 @@ type indexed_mod = { m: option, index: mod_index, glob_imports: dvec, - mut globbed_exports: [ident], + mut globbed_exports: [ident]/~, glob_imported_names: hashmap, path: str }; @@ -111,12 +111,12 @@ type indexed_mod = { control.*/ type def_map = hashmap; -type ext_map = hashmap; +type ext_map = hashmap; type impl_map = hashmap; -type impl_cache = hashmap>; +type impl_cache = hashmap>; type exp = {reexp: bool, id: def_id}; -type exp_map = hashmap; +type exp_map = hashmap; type env = {cstore: cstore::cstore, @@ -125,15 +125,15 @@ type env = imports: hashmap, exp_map: exp_map, mod_map: hashmap, - block_map: hashmap, + block_map: hashmap, ext_map: ext_map, impl_map: impl_map, impl_cache: impl_cache, ext_cache: ext_hash, used_imports: {mut track: bool, - mut data: [node_id]}, + mut data: [node_id]/~}, reported: dvec<{ident: ast::ident, sc: scope}>, - mut ignored_imports: [node_id], + mut ignored_imports: [node_id]/~, mut current_tp: option, mut resolve_unexported: bool, sess: session}; @@ -181,9 +181,9 @@ fn create_env(sess: session, amap: ast_map::map) -> @env { impl_map: int_hash(), impl_cache: new_def_hash(), ext_cache: new_ext_hash(), - used_imports: {mut track: false, mut data: []}, + used_imports: {mut track: false, mut data: []/~}, reported: dvec(), - mut ignored_imports: [], + mut ignored_imports: []/~, mut current_tp: none, mut resolve_unexported: false, sess: sess} @@ -243,7 +243,7 @@ fn map_crate(e: @env, c: @ast::crate) { ast::view_path_list(mod_path, idents, _) { for idents.each {|ident| let t = todo(ident.node.name, - @(mod_path.idents + [ident.node.name]), + @(mod_path.idents + [ident.node.name]/~), ident.span, sc); e.imports.insert(ident.node.id, t); } @@ -271,7 +271,7 @@ fn map_crate(e: @env, c: @ast::crate) { @{m: some(md), index: index_mod(md), glob_imports: dvec(), - mut globbed_exports: [], + mut globbed_exports: []/~, glob_imported_names: box_str_hash(), path: path_from_scope(sc, *i.ident)}); } @@ -280,7 +280,7 @@ fn map_crate(e: @env, c: @ast::crate) { @{m: none::, index: index_nmod(nmd), glob_imports: dvec(), - mut globbed_exports: [], + mut globbed_exports: []/~, glob_imported_names: box_str_hash(), path: path_from_scope(sc, *i.ident)}); } @@ -305,8 +305,8 @@ fn map_crate(e: @env, c: @ast::crate) { } scope_block(b, _, _) { let globs = alt e.block_map.find(b.node.id) { - some(globs) { globs + [glob] } - none { [glob] } + some(globs) { globs + [glob]/~ } + none { [glob]/~ } }; e.block_map.insert(b.node.id, globs); } @@ -339,7 +339,7 @@ fn map_crate(e: @env, c: @ast::crate) { @{m: some(c.node.module), index: index_mod(c.node.module), glob_imports: dvec(), - mut globbed_exports: [], + mut 
globbed_exports: []/~, glob_imported_names: box_str_hash(), path: ""}); @@ -476,7 +476,8 @@ fn resolve_names(e: @env, c: @ast::crate) { _ { } } } - fn walk_tps(e: @env, tps: [ast::ty_param], &&sc: scopes, v: vt) { + fn walk_tps(e: @env, tps: [ast::ty_param]/~, + &&sc: scopes, v: vt) { let outer_current_tp = e.current_tp; let mut current = 0u; for tps.each {|tp| @@ -557,7 +558,7 @@ fn visit_item_with_scope(e: @env, i: @ast::item, for methods.each {|m| v.visit_ty_params(m.tps, sc, v); let msc = @cons(scope_method(m.self_id, tps + m.tps), sc); - v.visit_fn(visit::fk_method(m.ident, [], m), + v.visit_fn(visit::fk_method(m.ident, []/~, m), m.decl, m.body, m.span, m.id, msc, v); } } @@ -625,7 +626,7 @@ fn visit_fn_with_scope(e: @env, fk: visit::fn_kind, decl: ast::fn_decl, // is this a main fn declaration? alt fk { visit::fk_item_fn(nm, _) { - if is_main_name([ast_map::path_name(nm)]) && + if is_main_name([ast_map::path_name(nm)]/~) && !e.sess.building_library { // This is a main function -- set it in the session // as the main ID @@ -643,9 +644,9 @@ fn visit_fn_with_scope(e: @env, fk: visit::fn_kind, decl: ast::fn_decl, | visit::fk_ctor(_, tps, _, _) | visit::fk_dtor(tps, _, _) { scope_bare_fn(decl, id, tps) } visit::fk_anon(ast::proto_bare, _) { - scope_bare_fn(decl, id, []) } + scope_bare_fn(decl, id, []/~) } visit::fk_anon(_, _) | visit::fk_fn_block(_) { - scope_fn_expr(decl, id, []) } + scope_fn_expr(decl, id, []/~) } }; visit::visit_fn(fk, decl, body, sp, id, @cons(scope, sc), v); @@ -713,7 +714,7 @@ fn visit_local_with_scope(e: @env, loc: @local, &&sc: scopes, v:vt) { } -fn follow_import(e: env, &&sc: scopes, path: [ident], sp: span) -> +fn follow_import(e: env, &&sc: scopes, path: [ident]/~, sp: span) -> option { let path_len = vec::len(path); let mut dcur = lookup_in_scope_strict(e, sc, sp, path[0], ns_module); @@ -757,10 +758,10 @@ fn resolve_constr(e: @env, c: @ast::constr, &&sc: scopes, _v: vt) { // Import resolution fn resolve_import(e: env, n_id: node_id, name: ast::ident, - ids: [ast::ident], sp: codemap::span, &&sc: scopes) { + ids: [ast::ident]/~, sp: codemap::span, &&sc: scopes) { fn register(e: env, id: node_id, cx: ctxt, sp: codemap::span, name: ast::ident, lookup: fn(namespace) -> option, - impls: [@_impl]) { + impls: [@_impl]/~) { let val = lookup(ns_val), typ = lookup(ns_type), md = lookup(ns_module); if is_none(val) && is_none(typ) && is_none(md) && @@ -772,21 +773,21 @@ fn resolve_import(e: env, n_id: node_id, name: ast::ident, } // Temporarily disable this import and the imports coming after during // resolution of this import. 
- fn find_imports_after(e: env, id: node_id, &&sc: scopes) -> [node_id] { - fn lst(my_id: node_id, vis: [@view_item]) -> [node_id] { - let mut imports = [], found = false; + fn find_imports_after(e: env, id: node_id, &&sc: scopes) -> [node_id]/~ { + fn lst(my_id: node_id, vis: [@view_item]/~) -> [node_id]/~ { + let mut imports = []/~, found = false; for vis.each {|vi| iter_effective_import_paths(*vi) {|vp| alt vp.node { view_path_simple(_, _, id) | view_path_glob(_, id) { if id == my_id { found = true; } - if found { imports += [id]; } + if found { imports += [id]/~; } } view_path_list(_, ids, _) { for ids.each {|id| if id.node.id == my_id { found = true; } - if found { imports += [id.node.id]; } + if found { imports += [id.node.id]/~; } } } } @@ -822,7 +823,7 @@ fn resolve_import(e: env, n_id: node_id, name: ast::ident, let end_id = ids[n_idents - 1u]; if n_idents == 1u { register(e, n_id, in_scope(sc), sp, name, - {|ns| lookup_in_scope(e, sc, sp, end_id, ns, true) }, []); + {|ns| lookup_in_scope(e, sc, sp, end_id, ns, true) }, []/~); } else { alt lookup_in_scope(e, sc, sp, ids[0], ns_module, true) { none { @@ -832,7 +833,7 @@ fn resolve_import(e: env, n_id: node_id, name: ast::ident, let mut dcur = dcur_, i = 1u; loop { if i == n_idents - 1u { - let mut impls = []; + let mut impls = []/~; find_impls_in_mod(e, dcur, impls, some(end_id)); register(e, n_id, in_mod(dcur), sp, name, {|ns| lookup_in_mod(e, dcur, sp, end_id, ns, outside) @@ -860,7 +861,7 @@ fn resolve_import(e: env, n_id: node_id, name: ast::ident, // import alt e.imports.find(n_id) { some(resolving(sp)) { - e.imports.insert(n_id, resolved(none, none, none, @[], @"", sp)); + e.imports.insert(n_id, resolved(none, none, none, @[]/~, @"", sp)); } _ { } } @@ -910,7 +911,7 @@ fn unresolved_err(e: env, cx: ctxt, sp: span, name: ident, kind: str) { path = @(e.mod_map.get(did.node).path + *path); } else if did.node != ast::crate_node_id { let paths = e.ext_map.get(did); - path = @str::connect((paths + [path]).map({|x|*x}), "::"); + path = @str::connect((paths + [path]/~).map({|x|*x}), "::"); } } } @@ -1110,7 +1111,7 @@ fn lookup_in_scope(e: env, &&sc: scopes, sp: span, name: ident, ns: namespace, ret none; } let mut left_fn = false; - let mut closing = []; + let mut closing = []/~; // Used to determine whether self is in scope let mut left_fn_level2 = false; let mut sc = sc; @@ -1150,7 +1151,7 @@ fn lookup_in_scope(e: env, &&sc: scopes, sp: span, name: ident, ns: namespace, } else if ns != ns_module { left_fn = scope_is_fn(hd); alt scope_closes(hd) { - some(node_id) { closing += [node_id]; } + some(node_id) { closing += [node_id]/~; } _ { } } } @@ -1160,7 +1161,7 @@ fn lookup_in_scope(e: env, &&sc: scopes, sp: span, name: ident, ns: namespace, }; } -fn lookup_in_ty_params(e: env, name: ident, ty_params: [ast::ty_param]) +fn lookup_in_ty_params(e: env, name: ident, ty_params: [ast::ty_param]/~) -> option { let mut n = 0u; for ty_params.each {|tp| @@ -1183,7 +1184,7 @@ fn lookup_in_pat(e: env, name: ident, pat: @ast::pat) -> option { } fn lookup_in_fn(e: env, name: ident, decl: ast::fn_decl, - ty_params: [ast::ty_param], + ty_params: [ast::ty_param]/~, ns: namespace) -> option { alt ns { ns_val { @@ -1367,7 +1368,7 @@ fn lookup_in_mod(e: env, m: def, sp: span, name: ident, ns: namespace, // examining a module in an external crate let cached = e.ext_cache.find({did: defid, ident: name, ns: ns}); if !is_none(cached) { ret cached; } - let mut path = [name]; + let mut path = [name]/~; if defid.node != ast::crate_node_id { path = 
cstore::get_path(e.cstore, defid) + path; } @@ -1420,7 +1421,7 @@ fn lookup_import(e: env, n_id: node_id, ns: namespace) -> option { } resolved(val, typ, md, _, _, _) { if e.used_imports.track { - e.used_imports.data += [n_id]; + e.used_imports.data += [n_id]/~; } ret alt ns { ns_val { val } ns_type { typ } ns_module { md } }; } @@ -1490,7 +1491,7 @@ fn lookup_in_local_mod(e: env, node_id: node_id, sp: span, id: ident, ret lookup_glob_in_mod(e, inf, sp, id, ns, outside); } -fn lookup_in_globs(e: env, globs: [glob_imp_def], sp: span, id: ident, +fn lookup_in_globs(e: env, globs: [glob_imp_def]/~, sp: span, id: ident, ns: namespace, dr: dir) -> option { fn lookup_in_mod_(e: env, def: glob_imp_def, sp: span, name: ident, ns: namespace, dr: dir) -> option { @@ -1595,7 +1596,7 @@ fn add_to_index(index: hashmap>, id: ident, } } -fn index_view_items(view_items: [@ast::view_item], +fn index_view_items(view_items: [@ast::view_item]/~, index: hashmap>) { for view_items.each {|vi| alt vi.node { @@ -1684,7 +1685,7 @@ fn ns_for_def(d: def) -> namespace { } } -fn lookup_external(e: env, cnum: int, ids: [ident], ns: namespace) -> +fn lookup_external(e: env, cnum: int, ids: [ident]/~, ns: namespace) -> option { let mut result = none; for csearch::lookup_defs(e.sess.cstore, cnum, ids).each {|d| @@ -1757,9 +1758,9 @@ fn mie_span(mie: mod_index_entry) -> span { } fn check_item(e: @env, i: @ast::item, &&x: (), v: vt<()>) { - fn typaram_names(tps: [ast::ty_param]) -> [ident] { - let mut x: [ast::ident] = []; - for tps.each {|tp| x += [tp.ident]; } + fn typaram_names(tps: [ast::ty_param]/~) -> [ident]/~ { + let mut x: [ast::ident]/~ = []/~; + for tps.each {|tp| x += [tp.ident]/~; } ret x; } visit::visit_item(i, x, v); @@ -1914,7 +1915,7 @@ fn add_name(ch: checker, sp: span, name: ident) { ch.seen.push(name); } -fn ensure_unique(e: env, sp: span, elts: [T], id: fn(T) -> ident, +fn ensure_unique(e: env, sp: span, elts: [T]/~, id: fn(T) -> ident, kind: str) { let ch = checker(e, kind); for elts.each {|elt| add_name(ch, sp, id(elt)); } @@ -1942,7 +1943,7 @@ fn check_exports(e: @env) { let defs = [ found_def_item(item, ns_val), found_def_item(item, ns_type), - found_def_item(item, ns_module) ]; + found_def_item(item, ns_module) ]/~; for defs.each {|d| alt d { some(def) { @@ -1984,9 +1985,10 @@ fn check_exports(e: @env) { fn add_export(e: @env, export_id: node_id, target_id: def_id, reexp: bool) { let found = alt e.exp_map.find(export_id) { - some(f) { f } none { [] } + some(f) { f } none { []/~ } }; - e.exp_map.insert(export_id, found + [{reexp: reexp, id: target_id}]); + e.exp_map.insert(export_id, + found + [{reexp: reexp, id: target_id}]/~); } fn check_export(e: @env, ident: ident, _mod: @indexed_mod, @@ -2053,7 +2055,7 @@ fn check_exports(e: @env) { fn check_export_enum_list(e: @env, export_id: node_id, _mod: @indexed_mod, span: codemap::span, id: ast::ident, - ids: [ast::path_list_ident]) { + ids: [ast::path_list_ident]/~) { let parent_id = check_enum_ok(e, span, id, _mod); add_export(e, export_id, local_def(parent_id), false); for ids.each {|variant_id| @@ -2119,7 +2121,7 @@ fn check_exports(e: @env) { if ! 
glob_is_re_exported.contains_key(id) { cont; } iter_mod(*e, glob.def, glob.path.span, outside) {|ident, def| - _mod.globbed_exports += [ident]; + _mod.globbed_exports += [ident]/~; maybe_add_reexport(e, id, some(def)); } } @@ -2140,8 +2142,8 @@ type method_info = {did: def_id, n_tps: uint, ident: ast::ident}; is the ident of the iface that's being implemented * methods: the item's methods */ -type _impl = {did: def_id, ident: ast::ident, methods: [@method_info]}; -type iscopes = @list<@[@_impl]>; +type _impl = {did: def_id, ident: ast::ident, methods: [@method_info]/~}; +type iscopes = @list<@[@_impl]/~>; fn resolve_impls(e: @env, c: @ast::crate) { visit::visit_crate(*c, @nil, visit::mk_vt(@{ @@ -2153,9 +2155,9 @@ fn resolve_impls(e: @env, c: @ast::crate) { } fn find_impls_in_view_item(e: env, vi: @ast::view_item, - &impls: [@_impl], sc: option) { + &impls: [@_impl]/~, sc: option) { fn lookup_imported_impls(e: env, id: node_id, - act: fn(@[@_impl])) { + act: fn(@[@_impl]/~)) { alt e.imports.get(id) { resolved(_, _, _, is, _, _) { act(is); } todo(name, path, span, scopes) { @@ -2171,14 +2173,14 @@ fn find_impls_in_view_item(e: env, vi: @ast::view_item, iter_effective_import_paths(*vi) { |vp| alt vp.node { ast::view_path_simple(name, pt, id) { - let mut found = []; + let mut found = []/~; if vec::len(pt.idents) == 1u { option::iter(sc) {|sc| list::iter(sc) {|level| if vec::len(found) == 0u { for vec::each(*level) {|imp| if imp.ident == pt.idents[0] { - found += [@{ident: name with *imp}]; + found += [@{ident: name with *imp}]/~; } } if vec::len(found) > 0u { impls += found; } @@ -2188,7 +2190,7 @@ fn find_impls_in_view_item(e: env, vi: @ast::view_item, } else { lookup_imported_impls(e, id) {|is| for vec::each(*is) {|i| - impls += [@{ident: name with *i}]; + impls += [@{ident: name with *i}]/~; } } } @@ -2220,7 +2222,7 @@ fn find_impls_in_view_item(e: env, vi: @ast::view_item, item is a class; and none otherwise. Each record describes one interface implemented by i. 
*/ -fn find_impls_in_item(e: env, i: @ast::item, &impls: [@_impl], +fn find_impls_in_item(e: env, i: @ast::item, &impls: [@_impl]/~, name: option, ck_exports: option<@indexed_mod>) { alt i.node { @@ -2236,7 +2238,7 @@ fn find_impls_in_item(e: env, i: @ast::item, &impls: [@_impl], @{did: local_def(m.id), n_tps: vec::len(m.tps), ident: m.ident} - })}]; + })}]/~; } } ast::item_class(tps, ifces, items, _, _, _) { @@ -2250,14 +2252,14 @@ fn find_impls_in_item(e: env, i: @ast::item, &impls: [@_impl], methods: vec::map(mthds, {|m| @{did: local_def(m.id), n_tps: n_tps + m.tps.len(), - ident: m.ident}})}]; + ident: m.ident}})}]/~; } } _ {} } } -fn find_impls_in_mod_by_id(e: env, defid: def_id, &impls: [@_impl], +fn find_impls_in_mod_by_id(e: env, defid: def_id, &impls: [@_impl]/~, name: option) { let mut cached; alt e.impl_cache.find(defid) { @@ -2266,7 +2268,7 @@ fn find_impls_in_mod_by_id(e: env, defid: def_id, &impls: [@_impl], none { e.impl_cache.insert(defid, none); cached = if defid.crate == ast::local_crate { - let mut tmp = []; + let mut tmp = []/~; let mi = e.mod_map.get(defid.node); let md = option::get(mi.m); for md.view_items.each {|vi| @@ -2285,14 +2287,14 @@ fn find_impls_in_mod_by_id(e: env, defid: def_id, &impls: [@_impl], alt name { some(n) { for vec::each(*cached) {|im| - if n == im.ident { impls += [im]; } + if n == im.ident { impls += [im]/~; } } } _ { impls += *cached; } } } -fn find_impls_in_mod(e: env, m: def, &impls: [@_impl], +fn find_impls_in_mod(e: env, m: def, &impls: [@_impl]/~, name: option) { alt m { ast::def_mod(defid) { @@ -2304,7 +2306,7 @@ fn find_impls_in_mod(e: env, m: def, &impls: [@_impl], fn visit_block_with_impl_scope(e: @env, b: ast::blk, &&sc: iscopes, v: vt) { - let mut impls = []; + let mut impls = []/~; for b.node.view_items.each {|vi| find_impls_in_view_item(*e, vi, impls, some(sc)); } @@ -2322,7 +2324,7 @@ fn visit_block_with_impl_scope(e: @env, b: ast::blk, &&sc: iscopes, fn visit_mod_with_impl_scope(e: @env, m: ast::_mod, s: span, id: node_id, &&sc: iscopes, v: vt) { - let mut impls = []; + let mut impls = []/~; for m.view_items.each {|vi| find_impls_in_view_item(*e, vi, impls, some(sc)); } diff --git a/src/rustc/middle/trans/alt.rs b/src/rustc/middle/trans/alt.rs index f3f53804c20..b7da0b82540 100644 --- a/src/rustc/middle/trans/alt.rs +++ b/src/rustc/middle/trans/alt.rs @@ -77,7 +77,7 @@ fn variant_opt(tcx: ty::ctxt, pat_id: ast::node_id) -> opt { core::unreachable(); } -type bind_map = [{ident: ast::ident, val: ValueRef}]; +type bind_map = [{ident: ast::ident, val: ValueRef}]/~; fn assoc(key: ast::ident, list: bind_map) -> option { for vec::each(list) {|elt| if str::eq(*elt.ident, *key) { ret some(elt.val); } @@ -86,12 +86,12 @@ fn assoc(key: ast::ident, list: bind_map) -> option { } type match_branch = - @{pats: [@ast::pat], + @{pats: [@ast::pat]/~, bound: bind_map, data: @{bodycx: block, guard: option<@ast::expr>, id_map: pat_id_map}}; -type match = [match_branch]; +type match = [match_branch]/~; fn has_nested_bindings(m: match, col: uint) -> bool { for vec::each(m) {|br| @@ -104,28 +104,28 @@ fn has_nested_bindings(m: match, col: uint) -> bool { } fn expand_nested_bindings(m: match, col: uint, val: ValueRef) -> match { - let mut result = []; + let mut result = []/~; for vec::each(m) {|br| alt br.pats[col].node { ast::pat_ident(name, some(inner)) { - let pats = vec::slice(br.pats, 0u, col) + [inner] + + let pats = vec::slice(br.pats, 0u, col) + [inner]/~ + vec::slice(br.pats, col + 1u, br.pats.len()); result += [@{pats: pats, bound: br.bound + 
[{ident: path_to_ident(name), - val: val}] - with *br}]; + val: val}]/~ + with *br}]/~; } - _ { result += [br]; } + _ { result += [br]/~; } } } result } -type enter_pat = fn(@ast::pat) -> option<[@ast::pat]>; +type enter_pat = fn(@ast::pat) -> option<[@ast::pat]/~>; fn enter_match(dm: def_map, m: match, col: uint, val: ValueRef, e: enter_pat) -> match { - let mut result = []; + let mut result = []/~; for vec::each(m) {|br| alt e(br.pats[col]) { some(sub) { @@ -134,11 +134,11 @@ fn enter_match(dm: def_map, m: match, col: uint, val: ValueRef, let self = br.pats[col]; let bound = alt self.node { ast::pat_ident(name, none) if !pat_is_variant(dm, self) { - br.bound + [{ident: path_to_ident(name), val: val}] + br.bound + [{ident: path_to_ident(name), val: val}]/~ } _ { br.bound } }; - result += [@{pats: pats, bound: bound with *br}]; + result += [@{pats: pats, bound: bound with *br}]/~; } none { } } @@ -149,9 +149,9 @@ fn enter_match(dm: def_map, m: match, col: uint, val: ValueRef, fn enter_default(dm: def_map, m: match, col: uint, val: ValueRef) -> match { enter_match(dm, m, col, val) {|p| alt p.node { - ast::pat_wild | ast::pat_rec(_, _) | ast::pat_tup(_) { some([]) } + ast::pat_wild | ast::pat_rec(_, _) | ast::pat_tup(_) { some([]/~) } ast::pat_ident(_, none) if !pat_is_variant(dm, p) { - some([]) + some([]/~) } _ { none } } @@ -170,33 +170,33 @@ fn enter_opt(tcx: ty::ctxt, m: match, opt: opt, col: uint, else { none } } ast::pat_ident(_, none) if pat_is_variant(tcx.def_map, p) { - if opt_eq(tcx, variant_opt(tcx, p.id), opt) { some([]) } + if opt_eq(tcx, variant_opt(tcx, p.id), opt) { some([]/~) } else { none } } ast::pat_lit(l) { - if opt_eq(tcx, lit(l), opt) { some([]) } else { none } + if opt_eq(tcx, lit(l), opt) { some([]/~) } else { none } } ast::pat_range(l1, l2) { - if opt_eq(tcx, range(l1, l2), opt) { some([]) } else { none } + if opt_eq(tcx, range(l1, l2), opt) { some([]/~) } else { none } } _ { some(vec::from_elem(variant_size, dummy)) } } } } -fn enter_rec(dm: def_map, m: match, col: uint, fields: [ast::ident], +fn enter_rec(dm: def_map, m: match, col: uint, fields: [ast::ident]/~, val: ValueRef) -> match { let dummy = @{id: 0, node: ast::pat_wild, span: dummy_sp()}; enter_match(dm, m, col, val) {|p| alt p.node { ast::pat_rec(fpats, _) { - let mut pats = []; + let mut pats = []/~; for vec::each(fields) {|fname| let mut pat = dummy; for vec::each(fpats) {|fpat| if str::eq(*fpat.ident, *fname) { pat = fpat.pat; break; } } - pats += [pat]; + pats += [pat]/~; } some(pats) } @@ -220,8 +220,8 @@ fn enter_box(dm: def_map, m: match, col: uint, val: ValueRef) -> match { let dummy = @{id: 0, node: ast::pat_wild, span: dummy_sp()}; enter_match(dm, m, col, val) {|p| alt p.node { - ast::pat_box(sub) { some([sub]) } - _ { some([dummy]) } + ast::pat_box(sub) { some([sub]/~) } + _ { some([dummy]/~) } } } } @@ -230,13 +230,13 @@ fn enter_uniq(dm: def_map, m: match, col: uint, val: ValueRef) -> match { let dummy = @{id: 0, node: ast::pat_wild, span: dummy_sp()}; enter_match(dm, m, col, val) {|p| alt p.node { - ast::pat_uniq(sub) { some([sub]) } - _ { some([dummy]) } + ast::pat_uniq(sub) { some([sub]/~) } + _ { some([dummy]/~) } } } } -fn get_options(ccx: @crate_ctxt, m: match, col: uint) -> [opt] { +fn get_options(ccx: @crate_ctxt, m: match, col: uint) -> [opt]/~ { fn add_to_set(tcx: ty::ctxt, &&set: dvec, val: opt) { if set.any({|l| opt_eq(tcx, l, val)}) {ret;} set.push(val); @@ -262,7 +262,7 @@ fn get_options(ccx: @crate_ctxt, m: match, col: uint) -> [opt] { fn extract_variant_args(bcx: block, 
pat_id: ast::node_id, vdefs: {enm: def_id, var: def_id}, val: ValueRef) -> - {vals: [ValueRef], bcx: block} { + {vals: [ValueRef]/~, bcx: block} { let _icx = bcx.insn_ctxt("alt::extract_variant_args"); let ccx = bcx.fcx.ccx; let enum_ty_substs = alt check ty::get(node_id_type(bcx, pat_id)).struct { @@ -275,7 +275,7 @@ fn extract_variant_args(bcx: block, pat_id: ast::node_id, if size > 0u && (*variants).len() != 1u { let enumptr = PointerCast(bcx, val, T_opaque_enum_ptr(ccx)); - blobptr = GEPi(bcx, enumptr, [0u, 1u]); + blobptr = GEPi(bcx, enumptr, [0u, 1u]/~); } let vdefs_tg = vdefs.enm; let vdefs_var = vdefs.var; @@ -286,14 +286,14 @@ fn extract_variant_args(bcx: block, pat_id: ast::node_id, ret {vals: args, bcx: bcx}; } -fn collect_record_fields(m: match, col: uint) -> [ast::ident] { - let mut fields: [ast::ident] = []; +fn collect_record_fields(m: match, col: uint) -> [ast::ident]/~ { + let mut fields: [ast::ident]/~ = []/~; for vec::each(m) {|br| alt br.pats[col].node { ast::pat_rec(fs, _) { for vec::each(fs) {|f| if !vec::any(fields, {|x| str::eq(*f.ident, *x)}) { - fields += [f.ident]; + fields += [f.ident]/~; } } } @@ -375,8 +375,8 @@ fn pick_col(m: match) -> uint { ret best_col; } -fn compile_submatch(bcx: block, m: match, vals: [ValueRef], - chk: option, &exits: [exit_node]) { +fn compile_submatch(bcx: block, m: match, vals: [ValueRef]/~, + chk: option, &exits: [exit_node]/~) { let _icx = bcx.insn_ctxt("alt::compile_submatch"); let mut bcx = bcx; let tcx = bcx.tcx(), dm = tcx.def_map; @@ -405,7 +405,7 @@ fn compile_submatch(bcx: block, m: match, vals: [ValueRef], } if !bcx.unreachable { exits += [{bound: m[0].bound, from: bcx.llbb, - to: data.bodycx.llbb}]; + to: data.bodycx.llbb}]/~; } Br(bcx, data.bodycx.llbb); ret; @@ -433,10 +433,10 @@ fn compile_submatch(bcx: block, m: match, vals: [ValueRef], // Separate path for extracting and binding record fields if rec_fields.len() > 0u { let fields = ty::get_fields(node_id_type(bcx, pat_id)); - let mut rec_vals = []; + let mut rec_vals = []/~; for vec::each(rec_fields) {|field_name| let ix = option::get(ty::field_idx(field_name, fields)); - rec_vals += [GEPi(bcx, val, [0u, ix])]; + rec_vals += [GEPi(bcx, val, [0u, ix]/~)]/~; } compile_submatch(bcx, enter_rec(dm, m, col, rec_fields, val), rec_vals + vals_left, chk, exits); @@ -449,9 +449,9 @@ fn compile_submatch(bcx: block, m: match, vals: [ValueRef], ty::ty_tup(elts) { elts.len() } _ { ccx.sess.bug("non-tuple type in tuple pattern"); } }; - let mut tup_vals = [], i = 0u; + let mut tup_vals = []/~, i = 0u; while i < n_tup_elts { - tup_vals += [GEPi(bcx, val, [0u, i])]; + tup_vals += [GEPi(bcx, val, [0u, i]/~)]/~; i += 1u; } compile_submatch(bcx, enter_tup(dm, m, col, val, n_tup_elts), @@ -463,8 +463,9 @@ fn compile_submatch(bcx: block, m: match, vals: [ValueRef], if any_box_pat(m, col) { let box = Load(bcx, val); let box_no_addrspace = non_gc_box_cast(bcx, box); - let unboxed = GEPi(bcx, box_no_addrspace, [0u, abi::box_field_body]); - compile_submatch(bcx, enter_box(dm, m, col, val), [unboxed] + let unboxed = + GEPi(bcx, box_no_addrspace, [0u, abi::box_field_body]/~); + compile_submatch(bcx, enter_box(dm, m, col, val), [unboxed]/~ + vals_left, chk, exits); ret; } @@ -472,9 +473,10 @@ fn compile_submatch(bcx: block, m: match, vals: [ValueRef], if any_uniq_pat(m, col) { let box = Load(bcx, val); let box_no_addrspace = non_gc_box_cast(bcx, box); - let unboxed = GEPi(bcx, box_no_addrspace, [0u, abi::box_field_body]); + let unboxed = + GEPi(bcx, box_no_addrspace, [0u, abi::box_field_body]/~); 
compile_submatch(bcx, enter_uniq(dm, m, col, val), - [unboxed] + vals_left, chk, exits); + [unboxed]/~ + vals_left, chk, exits); ret; } @@ -491,7 +493,7 @@ fn compile_submatch(bcx: block, m: match, vals: [ValueRef], } else { let enumptr = PointerCast(bcx, val, T_opaque_enum_ptr(ccx)); - let discrimptr = GEPi(bcx, enumptr, [0u, 0u]); + let discrimptr = GEPi(bcx, enumptr, [0u, 0u]/~); test_val = Load(bcx, discrimptr); kind = switch; } @@ -567,7 +569,7 @@ fn compile_submatch(bcx: block, m: match, vals: [ValueRef], } } else if kind == compare { Br(bcx, else_cx.llbb); } let mut size = 0u; - let mut unpacked = []; + let mut unpacked = []/~; alt opt { var(_, vdef) { let args = extract_variant_args(opt_cx, pat_id, vdef, val); @@ -591,18 +593,18 @@ fn compile_submatch(bcx: block, m: match, vals: [ValueRef], } // Returns false for unreachable blocks -fn make_phi_bindings(bcx: block, map: [exit_node], +fn make_phi_bindings(bcx: block, map: [exit_node]/~, ids: pat_util::pat_id_map) -> bool { let _icx = bcx.insn_ctxt("alt::make_phi_bindings"); let our_block = bcx.llbb as uint; let mut success = true, bcx = bcx; for ids.each {|name, node_id| - let mut llbbs = []; - let mut vals = []; + let mut llbbs = []/~; + let mut vals = []/~; for vec::each(map) {|ex| if ex.to as uint == our_block { alt assoc(name, ex.bound) { - some(val) { llbbs += [ex.from]; vals += [val]; } + some(val) { llbbs += [ex.from]/~; vals += [val]/~; } none { } } } @@ -621,7 +623,7 @@ fn make_phi_bindings(bcx: block, map: [exit_node], fn trans_alt(bcx: block, alt_expr: @ast::expr, expr: @ast::expr, - arms: [ast::arm], + arms: [ast::arm]/~, mode: ast::alt_mode, dest: dest) -> block { let _icx = bcx.insn_ctxt("alt::trans_alt"); @@ -630,11 +632,11 @@ fn trans_alt(bcx: block, } } -fn trans_alt_inner(scope_cx: block, expr: @ast::expr, arms: [ast::arm], +fn trans_alt_inner(scope_cx: block, expr: @ast::expr, arms: [ast::arm]/~, mode: ast::alt_mode, dest: dest) -> block { let _icx = scope_cx.insn_ctxt("alt::trans_alt_inner"); let bcx = scope_cx, tcx = bcx.tcx(); - let mut bodies = [], match = []; + let mut bodies = []/~, match = []/~; let {bcx, val, _} = trans_temp_expr(bcx, expr); if bcx.unreachable { ret bcx; } @@ -642,12 +644,12 @@ fn trans_alt_inner(scope_cx: block, expr: @ast::expr, arms: [ast::arm], for vec::each(arms) {|a| let body = scope_block(bcx, a.body.info(), "case_body"); let id_map = pat_util::pat_id_map(tcx.def_map, a.pats[0]); - bodies += [body]; + bodies += [body]/~; for vec::each(a.pats) {|p| - match += [@{pats: [p], - bound: [], + match += [@{pats: [p]/~, + bound: []/~, data: @{bodycx: body, guard: a.guard, - id_map: id_map}}]; + id_map: id_map}}]/~; } } @@ -667,21 +669,21 @@ fn trans_alt_inner(scope_cx: block, expr: @ast::expr, arms: [ast::arm], } ast::alt_exhaustive { none } }; - let mut exit_map = []; + let mut exit_map = []/~; let t = node_id_type(bcx, expr.id); let spilled = spill_if_immediate(bcx, val, t); - compile_submatch(bcx, match, [spilled], mk_fail, exit_map); + compile_submatch(bcx, match, [spilled]/~, mk_fail, exit_map); - let mut arm_cxs = [], arm_dests = [], i = 0u; + let mut arm_cxs = []/~, arm_dests = []/~, i = 0u; for vec::each(arms) {|a| let body_cx = bodies[i]; let id_map = pat_util::pat_id_map(tcx.def_map, a.pats[0]); if make_phi_bindings(body_cx, exit_map, id_map) { let arm_dest = dup_for_join(dest); - arm_dests += [arm_dest]; + arm_dests += [arm_dest]/~; let mut arm_cx = trans_block(body_cx, a.body, arm_dest); arm_cx = trans_block_cleanups(arm_cx, body_cx); - arm_cxs += [arm_cx]; + arm_cxs += 
[arm_cx]/~; } i += 1u; } @@ -726,14 +728,14 @@ fn bind_irrefutable_pat(bcx: block, pat: @ast::pat, val: ValueRef, let rec_fields = ty::get_fields(node_id_type(bcx, pat.id)); for vec::each(fields) {|f| let ix = option::get(ty::field_idx(f.ident, rec_fields)); - let fldptr = GEPi(bcx, val, [0u, ix]); + let fldptr = GEPi(bcx, val, [0u, ix]/~); bcx = bind_irrefutable_pat(bcx, f.pat, fldptr, make_copy); } } ast::pat_tup(elems) { let mut i = 0u; for vec::each(elems) {|elem| - let fldptr = GEPi(bcx, val, [0u, i]); + let fldptr = GEPi(bcx, val, [0u, i]/~); bcx = bind_irrefutable_pat(bcx, elem, fldptr, make_copy); i += 1u; } @@ -741,13 +743,13 @@ fn bind_irrefutable_pat(bcx: block, pat: @ast::pat, val: ValueRef, ast::pat_box(inner) { let box = Load(bcx, val); let unboxed = - GEPi(bcx, box, [0u, abi::box_field_body]); + GEPi(bcx, box, [0u, abi::box_field_body]/~); bcx = bind_irrefutable_pat(bcx, inner, unboxed, true); } ast::pat_uniq(inner) { let box = Load(bcx, val); let unboxed = - GEPi(bcx, box, [0u, abi::box_field_body]); + GEPi(bcx, box, [0u, abi::box_field_body]/~); bcx = bind_irrefutable_pat(bcx, inner, unboxed, true); } ast::pat_wild | ast::pat_lit(_) | ast::pat_range(_, _) { } diff --git a/src/rustc/middle/trans/base.rs b/src/rustc/middle/trans/base.rs index 43eb9f8c58d..5c36bcc7cda 100644 --- a/src/rustc/middle/trans/base.rs +++ b/src/rustc/middle/trans/base.rs @@ -102,7 +102,7 @@ impl ccx_icx for @crate_ctxt { fn insn_ctxt(s: str) -> icx_popper { #debug("new insn_ctxt: %s", s); if self.sess.count_llvm_insns() { - *self.stats.llvm_insn_ctxt += [s]; + *self.stats.llvm_insn_ctxt += [s]/~; } icx_popper(self) } @@ -120,8 +120,8 @@ impl fcx_icx for fn_ctxt { } } -fn join_returns(parent_cx: block, in_cxs: [block], - in_ds: [dest], out_dest: dest) -> block { +fn join_returns(parent_cx: block, in_cxs: [block]/~, + in_ds: [dest]/~, out_dest: dest) -> block { let out = sub_block(parent_cx, "join"); let mut reachable = false, i = 0u, phi = none; for vec::each(in_cxs) {|cx| @@ -172,7 +172,7 @@ fn log_fn_time(ccx: @crate_ctxt, name: str, start: time::timespec, end: time::timespec) { let elapsed = 1000 * ((end.sec - start.sec) as int) + ((end.nsec as int) - (start.nsec as int)) / 1000000; - *ccx.stats.fn_times += [{ident: name, time: elapsed}]; + *ccx.stats.fn_times += [{ident: name, time: elapsed}]/~; } @@ -228,29 +228,29 @@ fn get_simple_extern_fn(cx: block, } fn trans_native_call(cx: block, externs: hashmap, - llmod: ModuleRef, name: str, args: [ValueRef]) -> + llmod: ModuleRef, name: str, args: [ValueRef]/~) -> ValueRef { let _icx = cx.insn_ctxt("trans_native_call"); let n = args.len() as int; let llnative: ValueRef = get_simple_extern_fn(cx, externs, llmod, name, n); - let mut call_args: [ValueRef] = []; + let mut call_args: [ValueRef]/~ = []/~; for vec::each(args) {|a| - call_args += [a]; + call_args += [a]/~; } ret Call(cx, llnative, call_args); } fn trans_free(cx: block, v: ValueRef) -> block { let _icx = cx.insn_ctxt("trans_free"); - Call(cx, cx.ccx().upcalls.free, [PointerCast(cx, v, T_ptr(T_i8()))]); + Call(cx, cx.ccx().upcalls.free, [PointerCast(cx, v, T_ptr(T_i8()))]/~); cx } fn trans_unique_free(cx: block, v: ValueRef) -> block { let _icx = cx.insn_ctxt("trans_shared_free"); Call(cx, cx.ccx().upcalls.exchange_free, - [PointerCast(cx, v, T_ptr(T_i8()))]); + [PointerCast(cx, v, T_ptr(T_i8()))]/~); ret cx; } @@ -304,7 +304,7 @@ fn arrayalloca(cx: block, t: TypeRef, v: ValueRef) -> ValueRef { fn ptr_offs(bcx: block, base: ValueRef, sz: ValueRef) -> ValueRef { let _icx = 
bcx.insn_ctxt("ptr_offs"); let raw = PointerCast(bcx, base, T_ptr(T_i8())); - InBoundsGEP(bcx, raw, [sz]) + InBoundsGEP(bcx, raw, [sz]/~) } // Increment a pointer by a given amount and then cast it to be a pointer @@ -322,7 +322,7 @@ fn bump_ptr(bcx: block, t: ty::t, base: ValueRef, sz: ValueRef) -> // @llblobptr is the data part of a enum value; its actual type // is meaningless, as it will be cast away. fn GEP_enum(bcx: block, llblobptr: ValueRef, enum_id: ast::def_id, - variant_id: ast::def_id, ty_substs: [ty::t], + variant_id: ast::def_id, ty_substs: [ty::t]/~, ix: uint) -> ValueRef { let _icx = bcx.insn_ctxt("GEP_enum"); let ccx = bcx.ccx(); @@ -334,7 +334,7 @@ fn GEP_enum(bcx: block, llblobptr: ValueRef, enum_id: ast::def_id, }); let typed_blobptr = PointerCast(bcx, llblobptr, T_ptr(T_struct(arg_lltys))); - GEPi(bcx, typed_blobptr, [0u, ix]) + GEPi(bcx, typed_blobptr, [0u, ix]/~) } // Returns a pointer to the body for the box. The box may be an opaque @@ -348,7 +348,7 @@ fn opaque_box_body(bcx: block, let _icx = bcx.insn_ctxt("opaque_box_body"); let ccx = bcx.ccx(); let boxptr = PointerCast(bcx, boxptr, T_ptr(T_box_header(ccx))); - let bodyptr = GEPi(bcx, boxptr, [1u]); + let bodyptr = GEPi(bcx, boxptr, [1u]/~); PointerCast(bcx, bodyptr, T_ptr(type_of(ccx, body_t))) } @@ -376,7 +376,7 @@ fn malloc_raw_dyn(bcx: block, t: ty::t, heap: heap, lazily_emit_all_tydesc_glue(ccx, copy static_ti); // Allocate space: - let rval = Call(bcx, upcall, [lltydesc, size]); + let rval = Call(bcx, upcall, [lltydesc, size]/~); ret PointerCast(bcx, rval, llty); } @@ -394,7 +394,7 @@ fn malloc_general_dyn(bcx: block, t: ty::t, heap: heap, size: ValueRef) -> let _icx = bcx.insn_ctxt("malloc_general"); let box = malloc_raw_dyn(bcx, t, heap, size); let non_gc_box = non_gc_box_cast(bcx, box); - let body = GEPi(bcx, non_gc_box, [0u, abi::box_field_body]); + let body = GEPi(bcx, non_gc_box, [0u, abi::box_field_body]/~); ret {box: box, body: body}; } @@ -462,7 +462,7 @@ fn set_inline_hint(f: ValueRef) { as c_ulonglong, 0u as c_ulonglong); } -fn set_inline_hint_if_appr(attrs: [ast::attribute], +fn set_inline_hint_if_appr(attrs: [ast::attribute]/~, llfn: ValueRef) { alt attr::find_inline_attr(attrs) { attr::ia_hint { set_inline_hint(llfn); } @@ -546,7 +546,7 @@ fn declare_generic_glue(ccx: @crate_ctxt, t: ty::t, llfnty: TypeRef, fn make_generic_glue_inner(ccx: @crate_ctxt, t: ty::t, llfn: ValueRef, helper: glue_helper) -> ValueRef { let _icx = ccx.insn_ctxt("make_generic_glue_inner"); - let fcx = new_fn_ctxt(ccx, [], llfn, none); + let fcx = new_fn_ctxt(ccx, []/~, llfn, none); lib::llvm::SetLinkage(llfn, lib::llvm::InternalLinkage); ccx.stats.n_glues_created += 1u; // Any nontrivial glue is with values passed *by alias*; this is a @@ -628,7 +628,7 @@ fn emit_tydescs(ccx: @crate_ctxt) { C_shape(ccx, shape), // shape shape_tables, // shape_tables C_int(ccx, 0), // ununsed - C_int(ccx, 0)]); // unused + C_int(ccx, 0)]/~); // unused let gvar = ti.tydesc; llvm::LLVMSetInitializer(gvar, tydesc); @@ -660,7 +660,7 @@ fn make_take_glue(bcx: block, v: ValueRef, t: ty::t) { closure::make_fn_glue(bcx, v, t, take_ty) } ty::ty_iface(_, _) { - let box = Load(bcx, GEPi(bcx, v, [0u, 1u])); + let box = Load(bcx, GEPi(bcx, v, [0u, 1u]/~)); incr_refcnt_of_boxed(bcx, box); bcx } @@ -680,7 +680,7 @@ fn incr_refcnt_of_boxed(cx: block, box_ptr: ValueRef) { let _icx = cx.insn_ctxt("incr_refcnt_of_boxed"); let ccx = cx.ccx(); maybe_validate_box(cx, box_ptr); - let rc_ptr = GEPi(cx, box_ptr, [0u, abi::box_field_refcnt]); + let rc_ptr = 
GEPi(cx, box_ptr, [0u, abi::box_field_refcnt]/~); let rc = Load(cx, rc_ptr); let rc = Add(cx, rc, C_int(ccx, 1)); Store(cx, rc, rc_ptr); @@ -706,14 +706,14 @@ fn make_free_glue(bcx: block, v: ValueRef, t: ty::t) { let bcx = alt ty::get(t).struct { ty::ty_box(body_mt) { let v = PointerCast(bcx, v, type_of(ccx, t)); - let body = GEPi(bcx, v, [0u, abi::box_field_body]); + let body = GEPi(bcx, v, [0u, abi::box_field_body]/~); let bcx = drop_ty(bcx, body, body_mt.ty); trans_free(bcx, v) } ty::ty_opaque_box { let v = PointerCast(bcx, v, type_of(ccx, t)); - let td = Load(bcx, GEPi(bcx, v, [0u, abi::box_field_tydesc])); - let valptr = GEPi(bcx, v, [0u, abi::box_field_body]); + let td = Load(bcx, GEPi(bcx, v, [0u, abi::box_field_tydesc]/~)); + let valptr = GEPi(bcx, v, [0u, abi::box_field_body]/~); call_tydesc_glue_full(bcx, valptr, td, abi::tydesc_field_drop_glue, none); trans_free(bcx, v) @@ -752,11 +752,11 @@ fn make_free_glue(bcx: block, v: ValueRef, t: ty::t) { fn trans_class_drop(bcx: block, v0: ValueRef, dtor_did: ast::def_id, class_did: ast::def_id, substs: ty::substs) -> block { - let drop_flag = GEPi(bcx, v0, [0u, 0u]); + let drop_flag = GEPi(bcx, v0, [0u, 0u]/~); with_cond(bcx, IsNotNull(bcx, Load(bcx, drop_flag))) {|cx| let mut bcx = cx; // We have to cast v0 - let classptr = GEPi(bcx, v0, [0u, 1u]); + let classptr = GEPi(bcx, v0, [0u, 1u]/~); // Find and call the actual destructor let dtor_addr = get_res_dtor(bcx.ccx(), dtor_did, class_did, substs.tps); // The second argument is the "self" argument for drop @@ -767,13 +767,13 @@ fn trans_class_drop(bcx: block, v0: ValueRef, dtor_did: ast::def_id, // of the output pointer and the environment (self) assert(params.len() == 2u); let self_arg = PointerCast(bcx, v0, params[1u]); - let args = [bcx.fcx.llretptr, self_arg]; + let args = [bcx.fcx.llretptr, self_arg]/~; Call(bcx, dtor_addr, args); // Drop the fields for vec::eachi(ty::class_items_as_mutable_fields(bcx.tcx(), class_did, substs)) {|i, fld| - let llfld_a = GEPi(bcx, classptr, [0u, i]); + let llfld_a = GEPi(bcx, classptr, [0u, i]/~); bcx = drop_ty(bcx, llfld_a, fld.mt.ty); } Store(bcx, C_u8(0u), drop_flag); @@ -814,7 +814,7 @@ fn make_drop_glue(bcx: block, v0: ValueRef, t: ty::t) { closure::make_fn_glue(bcx, v0, t, drop_ty) } ty::ty_iface(_, _) { - let box = Load(bcx, GEPi(bcx, v0, [0u, 1u])); + let box = Load(bcx, GEPi(bcx, v0, [0u, 1u]/~)); decr_refcnt_maybe_free(bcx, box, ty::mk_opaque_box(ccx.tcx)) } ty::ty_opaque_closure_ptr(ck) { @@ -831,7 +831,7 @@ fn make_drop_glue(bcx: block, v0: ValueRef, t: ty::t) { } fn get_res_dtor(ccx: @crate_ctxt, did: ast::def_id, - parent_id: ast::def_id, substs: [ty::t]) + parent_id: ast::def_id, substs: [ty::t]/~) -> ValueRef { let _icx = ccx.insn_ctxt("trans_res_dtor"); if (substs.len() > 0u) { @@ -861,7 +861,7 @@ fn maybe_validate_box(_cx: block, _box_ptr: ValueRef) { // let ccx = cx.ccx(); // warn_not_to_commit(ccx, "validate_box() is uncommented"); // let raw_box_ptr = PointerCast(cx, box_ptr, T_ptr(T_i8())); - // Call(cx, ccx.upcalls.validate_box, [raw_box_ptr]); + // Call(cx, ccx.upcalls.validate_box, [raw_box_ptr]/~); } fn decr_refcnt_maybe_free(bcx: block, box_ptr: ValueRef, t: ty::t) -> block { @@ -872,7 +872,7 @@ fn decr_refcnt_maybe_free(bcx: block, box_ptr: ValueRef, t: ty::t) -> block { let llbox_ty = T_opaque_box_ptr(ccx); let box_ptr = PointerCast(bcx, box_ptr, llbox_ty); with_cond(bcx, IsNotNull(bcx, box_ptr)) {|bcx| - let rc_ptr = GEPi(bcx, box_ptr, [0u, abi::box_field_refcnt]); + let rc_ptr = GEPi(bcx, box_ptr, [0u, 
abi::box_field_refcnt]/~); let rc = Sub(bcx, Load(bcx, rc_ptr), C_int(ccx, 1)); Store(bcx, rc, rc_ptr); let zero_test = ICmp(bcx, lib::llvm::IntEQ, C_int(ccx, 0), rc); @@ -978,12 +978,12 @@ fn compare_scalar_values(cx: block, lhs: ValueRef, rhs: ValueRef, type val_pair_fn = fn@(block, ValueRef, ValueRef) -> block; type val_and_ty_fn = fn@(block, ValueRef, ty::t) -> block; -fn load_inbounds(cx: block, p: ValueRef, idxs: [uint]) -> ValueRef { +fn load_inbounds(cx: block, p: ValueRef, idxs: [uint]/~) -> ValueRef { ret Load(cx, GEPi(cx, p, idxs)); } fn store_inbounds(cx: block, v: ValueRef, p: ValueRef, - idxs: [uint]) { + idxs: [uint]/~) { Store(cx, v, GEPi(cx, p, idxs)); } @@ -993,7 +993,8 @@ fn iter_structural_ty(cx: block, av: ValueRef, t: ty::t, let _icx = cx.insn_ctxt("iter_structural_ty"); fn iter_variant(cx: block, a_tup: ValueRef, - variant: ty::variant_info, tps: [ty::t], tid: ast::def_id, + variant: ty::variant_info, + tps: [ty::t]/~, tid: ast::def_id, f: val_and_ty_fn) -> block { let _icx = cx.insn_ctxt("iter_variant"); if variant.args.len() == 0u { ret cx; } @@ -1023,7 +1024,7 @@ fn iter_structural_ty(cx: block, av: ValueRef, t: ty::t, alt ty::get(t).struct { ty::ty_rec(fields) { for vec::eachi(fields) {|i, fld| - let llfld_a = GEPi(cx, av, [0u, i]); + let llfld_a = GEPi(cx, av, [0u, i]/~); cx = f(cx, llfld_a, fld.mt.ty); } } @@ -1034,7 +1035,7 @@ fn iter_structural_ty(cx: block, av: ValueRef, t: ty::t, } ty::ty_tup(args) { for vec::eachi(args) {|i, arg| - let llfld_a = GEPi(cx, av, [0u, i]); + let llfld_a = GEPi(cx, av, [0u, i]/~); cx = f(cx, llfld_a, arg); } } @@ -1051,8 +1052,8 @@ fn iter_structural_ty(cx: block, av: ValueRef, t: ty::t, let ccx = cx.ccx(); let llenumty = T_opaque_enum_ptr(ccx); let av_enum = PointerCast(cx, av, llenumty); - let lldiscrim_a_ptr = GEPi(cx, av_enum, [0u, 0u]); - let llunion_a_ptr = GEPi(cx, av_enum, [0u, 1u]); + let lldiscrim_a_ptr = GEPi(cx, av_enum, [0u, 0u]/~); + let llunion_a_ptr = GEPi(cx, av_enum, [0u, 1u]/~); let lldiscrim_a = Load(cx, lldiscrim_a_ptr); // NB: we must hit the discriminant first so that structural @@ -1078,13 +1079,13 @@ fn iter_structural_ty(cx: block, av: ValueRef, t: ty::t, ty::ty_class(did, substs) { // Take the drop bit into account let classptr = if is_some(ty::ty_dtor(cx.tcx(), did)) { - GEPi(cx, av, [0u, 1u]) + GEPi(cx, av, [0u, 1u]/~) } else { av }; for vec::eachi(ty::class_items_as_mutable_fields(cx.tcx(), did, substs)) {|i, fld| - let llfld_a = GEPi(cx, classptr, [0u, i]); + let llfld_a = GEPi(cx, classptr, [0u, i]/~); cx = f(cx, llfld_a, fld.mt.ty); } } @@ -1201,7 +1202,7 @@ fn call_tydesc_glue_full(++cx: block, v: ValueRef, tydesc: ValueRef, let llfn = { alt static_glue_fn { none { - let llfnptr = GEPi(cx, tydesc, [0u, field]); + let llfnptr = GEPi(cx, tydesc, [0u, field]/~); Load(cx, llfnptr) } some(sgf) { sgf } @@ -1209,7 +1210,7 @@ fn call_tydesc_glue_full(++cx: block, v: ValueRef, tydesc: ValueRef, }; Call(cx, llfn, [C_null(T_ptr(T_nil())), C_null(T_ptr(T_nil())), - C_null(T_ptr(T_ptr(cx.ccx().tydesc_type))), llrawptr]); + C_null(T_ptr(T_ptr(cx.ccx().tydesc_type))), llrawptr]/~); } // See [Note-arg-mode] @@ -1235,13 +1236,13 @@ fn call_cmp_glue(bcx: block, lhs: ValueRef, rhs: ValueRef, t: ty::t, let llrawrhsptr = BitCast(bcx, llrhs, T_ptr(T_i8())); let lltydesc = get_tydesc_simple(bcx.ccx(), t); let lltydescs = - Load(bcx, GEPi(bcx, lltydesc, [0u, abi::tydesc_field_first_param])); + Load(bcx, GEPi(bcx, lltydesc, [0u, abi::tydesc_field_first_param]/~)); let llfn = bcx.ccx().upcalls.cmp_type; let 
llcmpresultptr = alloca(bcx, T_i1()); Call(bcx, llfn, [llcmpresultptr, lltydesc, lltydescs, - llrawlhsptr, llrawrhsptr, llop]); + llrawlhsptr, llrawrhsptr, llop]/~); ret Load(bcx, llcmpresultptr); } @@ -1325,7 +1326,7 @@ fn call_memmove(cx: block, dst: ValueRef, src: ValueRef, let size = IntCast(cx, n_bytes, ccx.int_type); let align = C_i32(1i32); let volatile = C_bool(false); - Call(cx, memmove, [dst_ptr, src_ptr, size, align, volatile]); + Call(cx, memmove, [dst_ptr, src_ptr, size, align, volatile]/~); } fn memmove_ty(bcx: block, dst: ValueRef, src: ValueRef, t: ty::t) { @@ -1498,7 +1499,7 @@ fn trans_unary(bcx: block, op: ast::unop, e: @ast::expr, bcx, un_expr.info(), fty, expr_ty(bcx, un_expr), {|bcx| impl::trans_method_callee(bcx, callee_id, e, mentry) }, - arg_exprs([]), dest); + arg_exprs([]/~), dest); } _ {} } @@ -1749,7 +1750,7 @@ fn trans_assign_op(bcx: block, ex: @ast::expr, op: ast::binop, // the expr. impl::trans_method_callee(bcx, callee_id, dst, origin) }, - arg_exprs([src]), save_in(target)); + arg_exprs([src]/~), save_in(target)); ret move_val(bcx, DROP_EXISTING, lhs_res.val, {bcx: bcx, val: target, kind: owned}, @@ -1758,7 +1759,7 @@ fn trans_assign_op(bcx: block, ex: @ast::expr, op: ast::binop, _ {} } - // Special case for `+= [x]` + // Special case for `+= [x]/~` alt ty::get(t).struct { ty::ty_vec(_) { alt src.node { @@ -1826,7 +1827,7 @@ fn autoderef(cx: block, e_id: ast::node_id, alt ty::get(t1).struct { ty::ty_box(mt) { - let body = GEPi(cx, v1, [0u, abi::box_field_body]); + let body = GEPi(cx, v1, [0u, abi::box_field_body]/~); t1 = mt.ty; // Since we're changing levels of box indirection, we may have @@ -1891,7 +1892,8 @@ fn trans_lazy_binop(bcx: block, op: lazy_binop_ty, a: @ast::expr, if past_rhs.unreachable { ret store_in_dest(join, lhs, dest); } Br(past_rhs, join.llbb); - let phi = Phi(join, T_bool(), [lhs, rhs], [past_lhs.llbb, past_rhs.llbb]); + let phi = + Phi(join, T_bool(), [lhs, rhs]/~, [past_lhs.llbb, past_rhs.llbb]/~); ret store_in_dest(join, phi, dest); } @@ -1909,7 +1911,7 @@ fn trans_binary(bcx: block, op: ast::binop, lhs: @ast::expr, {|bcx| impl::trans_method_callee(bcx, callee_id, lhs, origin) }, - arg_exprs([rhs]), dest); + arg_exprs([rhs]/~), dest); } _ {} } @@ -1968,7 +1970,8 @@ fn trans_if(cx: block, cond: @ast::expr, thn: ast::blk, _ { else_cx } }; let else_bcx = trans_block_cleanups(else_bcx, else_cx); - ret join_returns(cx, [then_bcx, else_bcx], [then_dest, else_dest], dest); + ret join_returns(cx, + [then_bcx, else_bcx]/~, [then_dest, else_dest]/~, dest); } fn trans_while(cx: block, cond: @ast::expr, body: ast::blk) @@ -2052,39 +2055,39 @@ fn trans_external_path(ccx: @crate_ctxt, did: ast::def_id, t: ty::t) } fn normalize_for_monomorphization(tcx: ty::ctxt, ty: ty::t) -> option { - // FIXME[mono] could do this recursively. is that worthwhile? (#2529) + // FIXME[mono]/~ could do this recursively. is that worthwhile? 
(#2529) alt ty::get(ty).struct { ty::ty_box(mt) { some(ty::mk_opaque_box(tcx)) } ty::ty_fn(fty) { some(ty::mk_fn(tcx, {purity: ast::impure_fn, proto: fty.proto, - inputs: [], + inputs: []/~, output: ty::mk_nil(tcx), ret_style: ast::return_val, - constraints: []})) } + constraints: []/~})) } ty::ty_iface(_, _) { some(ty::mk_fn(tcx, {purity: ast::impure_fn, proto: ast::proto_box, - inputs: [], + inputs: []/~, output: ty::mk_nil(tcx), ret_style: ast::return_val, - constraints: []})) } + constraints: []/~})) } ty::ty_ptr(_) { some(ty::mk_uint(tcx)) } _ { none } } } -fn make_mono_id(ccx: @crate_ctxt, item: ast::def_id, substs: [ty::t], +fn make_mono_id(ccx: @crate_ctxt, item: ast::def_id, substs: [ty::t]/~, vtables: option, - param_uses: option<[type_use::type_uses]>) -> mono_id { + param_uses: option<[type_use::type_uses]/~>) -> mono_id { let precise_param_ids = alt vtables { some(vts) { let bounds = ty::lookup_item_type(ccx.tcx, item).bounds; let mut i = 0u; vec::map2(*bounds, substs, {|bounds, subst| - let mut v = []; + let mut v = []/~; for vec::each(*bounds) {|bound| alt bound { ty::bound_iface(_) { - v += [impl::vtable_id(ccx, vts[i])]; + vec::push(v, impl::vtable_id(ccx, vts[i])); i += 1u; } _ {} @@ -2124,7 +2127,8 @@ fn make_mono_id(ccx: @crate_ctxt, item: ast::def_id, substs: [ty::t], @{def: item, params: param_ids} } -fn monomorphic_fn(ccx: @crate_ctxt, fn_id: ast::def_id, real_substs: [ty::t], +fn monomorphic_fn(ccx: @crate_ctxt, fn_id: ast::def_id, + real_substs: [ty::t]/~, vtables: option, ref_id: option) -> {val: ValueRef, must_cast: bool} { @@ -2204,7 +2208,7 @@ fn monomorphic_fn(ccx: @crate_ctxt, fn_id: ast::def_id, real_substs: [ty::t], } ccx.monomorphizing.insert(fn_id, depth + 1u); - let pt = *pt + [path_name(@ccx.names(*name))]; + let pt = *pt + [path_name(@ccx.names(*name))]/~; let s = mangle_exported_name(ccx, pt, mono_ty); let mk_lldecl = {|| @@ -2252,10 +2256,10 @@ fn monomorphic_fn(ccx: @crate_ctxt, fn_id: ast::def_id, real_substs: [ty::t], ast_map::node_ctor(nm, tps, ctor, parent_id, _) { // ctors don't have attrs, at least not right now let d = mk_lldecl(); - let tp_tys: [ty::t] = ty::ty_params_to_tys(ccx.tcx, tps); + let tp_tys = ty::ty_params_to_tys(ccx.tcx, tps); trans_class_ctor(ccx, pt, ctor.node.dec, ctor.node.body, d, option::get_default(psubsts, - {tys:tp_tys, vtables: none, bounds: @[]}), + {tys:tp_tys, vtables: none, bounds: @[]/~}), fn_id.node, parent_id, ctor.span); d } @@ -2348,7 +2352,7 @@ fn maybe_instantiate_inline(ccx: @crate_ctxt, fn_id: ast::def_id) if (*impl_bnds).len() + mth.tps.len() == 0u { let llfn = get_item_val(ccx, mth.id); let path = ty::item_path(ccx.tcx, impl_did) + - [path_name(mth.ident)]; + [path_name(mth.ident)]/~; trans_fn(ccx, path, mth.decl, mth.body, llfn, impl_self(impl_ty), none, mth.id); } @@ -2373,7 +2377,7 @@ fn lval_static_fn(bcx: block, fn_id: ast::def_id, id: ast::node_id) } fn lval_static_fn_inner(bcx: block, fn_id: ast::def_id, id: ast::node_id, - tys: [ty::t], vtables: option) + tys: [ty::t]/~, vtables: option) -> lval_maybe_callee { let _icx = bcx.insn_ctxt("lval_static_fn_inner"); let ccx = bcx.ccx(), tcx = ccx.tcx; @@ -2509,7 +2513,7 @@ fn trans_var(cx: block, def: ast::def, id: ast::node_id)-> lval_maybe_callee { // Nullary variant. 
let enum_ty = node_id_type(cx, id); let llenumptr = alloc_ty(cx, enum_ty); - let lldiscrimptr = GEPi(cx, llenumptr, [0u, 0u]); + let lldiscrimptr = GEPi(cx, llenumptr, [0u, 0u]/~); let lldiscrim_gv = lookup_discriminant(ccx, vid); let lldiscrim = Load(cx, lldiscrim_gv); Store(cx, lldiscrim, lldiscrimptr); @@ -2569,9 +2573,9 @@ fn trans_rec_field_inner(bcx: block, val: ValueRef, ty: ty::t, (If any other code does the same thing, that's a bug */ let val = if deref { - GEPi(bcx, GEPi(bcx, val, [0u, 1u]), [0u, ix]) + GEPi(bcx, GEPi(bcx, val, [0u, 1u]/~), [0u, ix]/~) } - else { GEPi(bcx, val, [0u, ix]) }; + else { GEPi(bcx, val, [0u, ix]/~) }; ret {bcx: bcx, val: val, kind: owned}; } @@ -2622,7 +2626,7 @@ fn trans_index(cx: block, ex: @ast::expr, base: @ast::expr, // fail: bad bounds check. trans_fail(bcx, some(ex.span), "bounds check") }; - let elt = InBoundsGEP(bcx, base, [ix_val]); + let elt = InBoundsGEP(bcx, base, [ix_val]/~); ret lval_owned(bcx, PointerCast(bcx, elt, T_ptr(llunitty))); } @@ -2706,11 +2710,11 @@ fn trans_lval(cx: block, e: @ast::expr) -> lval_result { let val = alt check ty::get(t).struct { ty::ty_box(_) { let non_gc_val = non_gc_box_cast(sub.bcx, sub.val); - GEPi(sub.bcx, non_gc_val, [0u, abi::box_field_body]) + GEPi(sub.bcx, non_gc_val, [0u, abi::box_field_body]/~) } ty::ty_uniq(_) { let non_gc_val = non_gc_box_cast(sub.bcx, sub.val); - GEPi(sub.bcx, non_gc_val, [0u, abi::box_field_body]) + GEPi(sub.bcx, non_gc_val, [0u, abi::box_field_body]/~) } ty::ty_enum(_, _) { let ety = expr_ty(cx, e); @@ -2857,7 +2861,7 @@ fn trans_cast(cx: block, e: @ast::expr, id: ast::node_id, let cx = e_res.bcx; let llenumty = T_opaque_enum_ptr(ccx); let av_enum = PointerCast(cx, e_res.val, llenumty); - let lldiscrim_a_ptr = GEPi(cx, av_enum, [0u, 0u]); + let lldiscrim_a_ptr = GEPi(cx, av_enum, [0u, 0u]/~); let lldiscrim_a = Load(cx, lldiscrim_a_ptr); alt k_out { cast_integral {int_cast(e_res.bcx, ll_t_out, @@ -2889,7 +2893,7 @@ fn trans_loop_body(bcx: block, e: @ast::expr, ret_flag: option, // temp_cleanups: cleanups that should run only if failure occurs before the // call takes place: fn trans_arg_expr(cx: block, arg: ty::arg, lldestty: TypeRef, e: @ast::expr, - &temp_cleanups: [ValueRef], ret_flag: option, + &temp_cleanups: [ValueRef]/~, ret_flag: option, derefs: uint) -> result { #debug("+++ trans_arg_expr on %s", expr_to_str(e)); @@ -2979,7 +2983,7 @@ fn trans_arg_expr(cx: block, arg: ty::arg, lldestty: TypeRef, e: @ast::expr, // In the event that failure occurs before the call actually // happens, have to cleanup this copy: add_clean_temp_mem(bcx, val, arg.ty); - temp_cleanups += [val]; + temp_cleanups += [val]/~; } } } @@ -2994,7 +2998,7 @@ fn trans_arg_expr(cx: block, arg: ty::arg, lldestty: TypeRef, e: @ast::expr, } // when invoking a method, an argument of type @T or ~T can be implicltly -// converted to an argument of type &T. Similarly, [T] can be converted to +// converted to an argument of type &T. Similarly, [T]/~ can be converted to // [T]/& and so on. If such a conversion (called borrowing) is necessary, // then the borrowings table will have an appropriate entry inserted. This // routine consults this table and performs these adaptations. 
It returns a @@ -3012,7 +3016,7 @@ fn adapt_borrowed_value(lv: lval_result, alt ty::get(e_ty).struct { ty::ty_uniq(mt) | ty::ty_box(mt) { let box_ptr = load_value_from_lval_result(lv, e_ty); - let body_ptr = GEPi(bcx, box_ptr, [0u, abi::box_field_body]); + let body_ptr = GEPi(bcx, box_ptr, [0u, abi::box_field_body]/~); let rptr_ty = ty::mk_rptr(bcx.tcx(), ty::re_static, mt); ret {lv: lval_temp(bcx, body_ptr), ty: rptr_ty}; } @@ -3030,14 +3034,14 @@ fn adapt_borrowed_value(lv: lval_result, let unit_ty = ty::sequence_element_type(ccx.tcx, e_ty); let llunit_ty = type_of(ccx, unit_ty); let (base, len) = tvec::get_base_and_len(bcx, val, e_ty); - let p = alloca(bcx, T_struct([T_ptr(llunit_ty), ccx.int_type])); + let p = alloca(bcx, T_struct([T_ptr(llunit_ty), ccx.int_type]/~)); #debug("adapt_borrowed_value: adapting %s to %s", val_str(bcx.ccx().tn, val), val_str(bcx.ccx().tn, p)); - Store(bcx, base, GEPi(bcx, p, [0u, abi::slice_elt_base])); - Store(bcx, len, GEPi(bcx, p, [0u, abi::slice_elt_len])); + Store(bcx, base, GEPi(bcx, p, [0u, abi::slice_elt_base]/~)); + Store(bcx, len, GEPi(bcx, p, [0u, abi::slice_elt_len]/~)); // this isn't necessarily the type that rust would assign but it's // close enough for trans purposes, as it will have the same runtime @@ -3058,8 +3062,8 @@ fn adapt_borrowed_value(lv: lval_result, } enum call_args { - arg_exprs([@ast::expr]), - arg_vals([ValueRef]) + arg_exprs([@ast::expr]/~), + arg_vals([ValueRef]/~) } // NB: must keep 4 fns in sync: @@ -3070,11 +3074,11 @@ enum call_args { // - trans_args fn trans_args(cx: block, llenv: ValueRef, args: call_args, fn_ty: ty::t, dest: dest, ret_flag: option) - -> {bcx: block, args: [ValueRef], retslot: ValueRef} { + -> {bcx: block, args: [ValueRef]/~, retslot: ValueRef} { let _icx = cx.insn_ctxt("trans_args"); - let mut temp_cleanups = []; + let mut temp_cleanups = []/~; let arg_tys = ty::ty_fn_args(fn_ty); - let mut llargs: [ValueRef] = []; + let mut llargs: [ValueRef]/~ = []/~; let ccx = cx.ccx(); let mut bcx = cx; @@ -3091,10 +3095,10 @@ fn trans_args(cx: block, llenv: ValueRef, args: call_args, fn_ty: ty::t, by_val(_) { alloc_ty(bcx, retty) } }; - llargs += [llretslot]; + llargs += [llretslot]/~; // Arg 1: Env (closure-bindings / self value) - llargs += [llenv]; + llargs += [llenv]/~; // ... then explicit args. @@ -3110,7 +3114,7 @@ fn trans_args(cx: block, llenv: ValueRef, args: call_args, fn_ty: ty::t, e, temp_cleanups, if i == last { ret_flag } else { none }, 0u); bcx = r.bcx; - llargs += [r.val]; + llargs += [r.val]/~; } } arg_vals(vs) { @@ -3199,9 +3203,9 @@ fn trans_call_inner( faddr = load_if_immediate(bcx, faddr, fn_expr_ty); } let pair = faddr; - faddr = GEPi(bcx, pair, [0u, abi::fn_field_code]); + faddr = GEPi(bcx, pair, [0u, abi::fn_field_code]/~); faddr = Load(bcx, faddr); - let llclosure = GEPi(bcx, pair, [0u, abi::fn_field_box]); + let llclosure = GEPi(bcx, pair, [0u, abi::fn_field_box]/~); Load(bcx, llclosure) } }; @@ -3247,7 +3251,7 @@ fn trans_call_inner( } } -fn invoke(bcx: block, llfn: ValueRef, llargs: [ValueRef]) -> block { +fn invoke(bcx: block, llfn: ValueRef, llargs: [ValueRef]/~) -> block { let _icx = bcx.insn_ctxt("invoke_"); if bcx.unreachable { ret bcx; } if need_invoke(bcx) { @@ -3335,7 +3339,7 @@ fn get_landing_pad(bcx: block) -> BasicBlockRef { // The landing pad return type (the type being propagated). Not sure what // this represents but it's determined by the personality function and // this is what the EH proposal example uses. 
- let llretty = T_struct([T_ptr(T_i8()), T_i32()]); + let llretty = T_struct([T_ptr(T_i8()), T_i32()]/~); // The exception handling personality function. This is the C++ // personality function __gxx_personality_v0, wrapped in our naming // convention. @@ -3348,7 +3352,7 @@ fn get_landing_pad(bcx: block) -> BasicBlockRef { // Because we may have unwound across a stack boundary, we must call into // the runtime to figure out which stack segment we are on and place the // stack limit back into the TLS. - Call(pad_bcx, bcx.ccx().upcalls.reset_stack_limit, []); + Call(pad_bcx, bcx.ccx().upcalls.reset_stack_limit, []/~); // We store the retval in a function-central alloca, so that calls to // Resume can find it. @@ -3366,7 +3370,7 @@ fn get_landing_pad(bcx: block) -> BasicBlockRef { ret pad_bcx.llbb; } -fn trans_tup(bcx: block, elts: [@ast::expr], dest: dest) -> block { +fn trans_tup(bcx: block, elts: [@ast::expr]/~, dest: dest) -> block { let _icx = bcx.insn_ctxt("trans_tup"); let mut bcx = bcx; let addr = alt dest { @@ -3377,19 +3381,19 @@ fn trans_tup(bcx: block, elts: [@ast::expr], dest: dest) -> block { save_in(pos) { pos } _ { bcx.tcx().sess.bug("trans_tup: weird dest"); } }; - let mut temp_cleanups = []; + let mut temp_cleanups = []/~; for vec::eachi(elts) {|i, e| - let dst = GEPi(bcx, addr, [0u, i]); + let dst = GEPi(bcx, addr, [0u, i]/~); let e_ty = expr_ty(bcx, e); bcx = trans_expr_save_in(bcx, e, dst); add_clean_temp_mem(bcx, dst, e_ty); - temp_cleanups += [dst]; + temp_cleanups += [dst]/~; } for vec::each(temp_cleanups) {|cleanup| revoke_clean(bcx, cleanup); } ret bcx; } -fn trans_rec(bcx: block, fields: [ast::field], +fn trans_rec(bcx: block, fields: [ast::field]/~, base: option<@ast::expr>, id: ast::node_id, dest: dest) -> block { let _icx = bcx.insn_ctxt("trans_rec"); @@ -3407,15 +3411,15 @@ fn trans_rec(bcx: block, fields: [ast::field], let ty_fields = alt check ty::get(t).struct { ty::ty_rec(f) { f } }; - let mut temp_cleanups = []; + let mut temp_cleanups = []/~; for fields.each {|fld| let ix = option::get(vec::position(ty_fields, {|ft| str::eq(*fld.node.ident, *ft.ident) })); - let dst = GEPi(bcx, addr, [0u, ix]); + let dst = GEPi(bcx, addr, [0u, ix]/~); bcx = trans_expr_save_in(bcx, fld.node.expr, dst); add_clean_temp_mem(bcx, dst, ty_fields[ix].mt.ty); - temp_cleanups += [dst]; + temp_cleanups += [dst]/~; } alt base { some(bexp) { @@ -3424,8 +3428,8 @@ fn trans_rec(bcx: block, fields: [ast::field], // Copy over inherited fields for ty_fields.eachi {|i, tf| if !vec::any(fields, {|f| str::eq(*f.node.ident, *tf.ident)}) { - let dst = GEPi(bcx, addr, [0u, i]); - let base = GEPi(bcx, base_val, [0u, i]); + let dst = GEPi(bcx, addr, [0u, i]/~); + let base = GEPi(bcx, base_val, [0u, i]/~); let val = load_if_immediate(bcx, base, tf.mt.ty); bcx = copy_val(bcx, INIT, dst, val, tf.mt.ty); } @@ -3652,7 +3656,7 @@ fn trans_expr(bcx: block, e: @ast::expr, dest: dest) -> block { { |bcx| impl::trans_method_callee(bcx, callee_id, base, origin) }, - arg_exprs([idx]), dest); + arg_exprs([idx]/~), dest); } // These return nothing @@ -3765,7 +3769,8 @@ fn trans_expr(bcx: block, e: @ast::expr, dest: dest) -> block { #debug["voidval = %s", val_str(ccx.tn, voidval)]; let llval_ty = type_of(ccx, expr_ty(bcx, val)); - let args = [llsize_of(ccx, llval_ty), llalign_of(ccx, llval_ty)]; + let args = + [llsize_of(ccx, llval_ty), llalign_of(ccx, llval_ty)]/~; let origin = bcx.ccx().maps.method_map.get(alloc_id); let bcx = trans_call_inner( bcx, e.info(), node_id_type(bcx, alloc_id), void_ty, @@ -3867,7 
+3872,7 @@ fn trans_log(log_ex: @ast::expr, lvl: @ast::expr, ret trans_expr(bcx, lvl, ignore); } - let modpath = [path_mod(ccx.link_meta.name)] + + let modpath = [path_mod(ccx.link_meta.name)]/~ + vec::filter(bcx.fcx.path, {|e| alt e { path_mod(_) { true } _ { false } } }); @@ -3902,7 +3907,7 @@ fn trans_log(log_ex: @ast::expr, lvl: @ast::expr, // Call the polymorphic log function. let val = spill_if_immediate(bcx, val, e_ty); let val = PointerCast(bcx, val, T_ptr(T_i8())); - Call(bcx, ccx.upcalls.log_type, [tydesc, val, level]); + Call(bcx, ccx.upcalls.log_type, [tydesc, val, level]/~); bcx } } @@ -3969,7 +3974,7 @@ fn trans_trace(bcx: block, sp_opt: option, trace_str: str) { let ccx = bcx.ccx(); let V_trace_str = PointerCast(bcx, V_trace_str, T_ptr(T_i8())); let V_filename = PointerCast(bcx, V_filename, T_ptr(T_i8())); - let args = [V_trace_str, V_filename, C_int(ccx, V_line)]; + let args = [V_trace_str, V_filename, C_int(ccx, V_line)]/~; Call(bcx, ccx.upcalls.trace, args); } @@ -3998,7 +4003,7 @@ fn trans_fail_value(bcx: block, sp_opt: option, }; let V_str = PointerCast(bcx, V_fail_str, T_ptr(T_i8())); let V_filename = PointerCast(bcx, V_filename, T_ptr(T_i8())); - let args = [V_str, V_filename, C_int(ccx, V_line)]; + let args = [V_str, V_filename, C_int(ccx, V_line)]/~; let bcx = invoke(bcx, bcx.ccx().upcalls._fail, args); Unreachable(bcx); ret bcx; @@ -4160,8 +4165,8 @@ fn new_block(cx: fn_ctxt, parent: option, +kind: block_kind, } fn simple_block_scope() -> block_kind { - block_scope({loop_break: none, mut cleanups: [], - mut cleanup_paths: [], mut landing_pad: none}) + block_scope({loop_break: none, mut cleanups: []/~, + mut cleanup_paths: []/~, mut landing_pad: none}) } // Use this when you're at the top block of a function or the like. @@ -4181,8 +4186,8 @@ fn loop_scope_block(bcx: block, loop_break: block, n: str, opt_node_info: option) -> block { ret new_block(bcx.fcx, some(bcx), block_scope({ loop_break: some(loop_break), - mut cleanups: [], - mut cleanup_paths: [], + mut cleanups: []/~, + mut cleanup_paths: []/~, mut landing_pad: none }), n, opt_node_info); } @@ -4259,7 +4264,7 @@ fn cleanup_and_leave(bcx: block, upto: option, } let sub_cx = sub_block(bcx, "cleanup"); Br(bcx, sub_cx.llbb); - inf.cleanup_paths += [{target: leave, dest: sub_cx.llbb}]; + inf.cleanup_paths += [{target: leave, dest: sub_cx.llbb}]/~; bcx = trans_block_cleanups_(sub_cx, cur, is_lpad); } _ {} @@ -4448,7 +4453,7 @@ fn new_fn_ctxt(ccx: @crate_ctxt, path: path, llfndecl: ValueRef, // field of the fn_ctxt with fn create_llargs_for_fn_args(cx: fn_ctxt, ty_self: self_arg, - args: [ast::arg]) { + args: [ast::arg]/~) { let _icx = cx.insn_ctxt("create_llargs_for_fn_args"); // Skip the implicit arguments 0, and 1. 
let mut arg_n = first_real_arg; @@ -4472,8 +4477,8 @@ fn create_llargs_for_fn_args(cx: fn_ctxt, } } -fn copy_args_to_allocas(fcx: fn_ctxt, bcx: block, args: [ast::arg], - arg_tys: [ty::arg]) -> block { +fn copy_args_to_allocas(fcx: fn_ctxt, bcx: block, args: [ast::arg]/~, + arg_tys: [ty::arg]/~) -> block { let _icx = fcx.insn_ctxt("copy_args_to_allocas"); let tcx = bcx.tcx(); let mut arg_n: uint = 0u, bcx = bcx; @@ -4615,12 +4620,12 @@ fn trans_enum_variant(ccx: @crate_ctxt, enum_id: ast::node_id, ident: @"arg", id: varg.id} }); - let fcx = new_fn_ctxt_w_id(ccx, [], llfndecl, variant.node.id, + let fcx = new_fn_ctxt_w_id(ccx, []/~, llfndecl, variant.node.id, param_substs, none); create_llargs_for_fn_args(fcx, no_self, fn_args); let ty_param_substs = alt param_substs { some(substs) { substs.tys } - none { [] } + none { []/~ } }; let bcx = top_scope_block(fcx, none), lltop = bcx.llbb; let arg_tys = ty::ty_fn_args(node_id_type(bcx, variant.node.id)); @@ -4632,9 +4637,9 @@ fn trans_enum_variant(ccx: @crate_ctxt, enum_id: ast::node_id, } else { let llenumptr = PointerCast(bcx, fcx.llretptr, T_opaque_enum_ptr(ccx)); - let lldiscrimptr = GEPi(bcx, llenumptr, [0u, 0u]); + let lldiscrimptr = GEPi(bcx, llenumptr, [0u, 0u]/~); Store(bcx, C_int(ccx, disr), lldiscrimptr); - GEPi(bcx, llenumptr, [0u, 1u]) + GEPi(bcx, llenumptr, [0u, 1u]/~) }; let t_id = local_def(enum_id); let v_id = local_def(variant.node.id); @@ -4824,10 +4829,10 @@ fn trans_class_ctor(ccx: @crate_ctxt, path: path, decl: ast::fn_decl, parent_id)) { // Initialize the drop flag let one = C_u8(1u); - let flag = GEPi(bcx_top, selfptr, [0u, 0u]); + let flag = GEPi(bcx_top, selfptr, [0u, 0u]/~); Store(bcx_top, one, flag); // Select the pointer to the class itself - GEPi(bcx_top, selfptr, [0u, 1u]) + GEPi(bcx_top, selfptr, [0u, 1u]/~) } else { selfptr }; @@ -4839,7 +4844,7 @@ fn trans_class_ctor(ccx: @crate_ctxt, path: path, decl: ast::fn_decl, // drop their LHS for fields.each {|field| let ix = field_idx_strict(bcx.tcx(), sp, field.ident, fields); - bcx = zero_mem(bcx, GEPi(bcx, valptr, [0u, ix]), field.mt.ty); + bcx = zero_mem(bcx, GEPi(bcx, valptr, [0u, ix]/~), field.mt.ty); } // note we don't want to take *or* drop self. 
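The hunks above and below all apply one mechanical rewrite: every vector type and vector literal picks up the `/~` suffix, and a few single-element appends of the form `v += [x]` are rewritten as `vec::push(v, x)`. A minimal sketch of that pattern in the pre-1.0 dialect this patch targets (the function and variable names below are hypothetical, chosen only for illustration):

    // Before this patch: square-bracket vectors with no suffix.
    fn collect_evens(xs: [uint]) -> [uint] {
        let mut out: [uint] = [];
        for vec::each(xs) {|x| if x % 2u == 0u { out += [x]; } }
        ret out;
    }

    // After this patch: [T] becomes [T]/~ and [] becomes []/~ in both type
    // and expression position; some appends go through vec::push instead.
    fn collect_evens(xs: [uint]/~) -> [uint]/~ {
        let mut out: [uint]/~ = []/~;
        for vec::each(xs) {|x| if x % 2u == 0u { vec::push(out, x); } }
        ret out;
    }
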
@@ -4872,7 +4877,7 @@ fn trans_class_dtor(ccx: @crate_ctxt, path: path, /* The dtor takes a (null) output pointer, and a self argument, and returns () */ let lldty = T_fn([T_ptr(type_of(ccx, ty::mk_nil(tcx))), - T_ptr(type_of(ccx, class_ty))], + T_ptr(type_of(ccx, class_ty))]/~, llvm::LLVMVoidType()); let s = get_dtor_symbol(ccx, path, dtor_id, psubsts); @@ -4901,11 +4906,11 @@ fn trans_item(ccx: @crate_ctxt, item: ast::item) { ast::item_fn(decl, tps, body) { if decl.purity == ast::crust_fn { let llfndecl = get_item_val(ccx, item.id); - native::trans_crust_fn(ccx, *path + [path_name(item.ident)], + native::trans_crust_fn(ccx, *path + [path_name(item.ident)]/~, decl, body, llfndecl, item.id); } else if tps.len() == 0u { let llfndecl = get_item_val(ccx, item.id); - trans_fn(ccx, *path + [path_name(item.ident)], decl, body, + trans_fn(ccx, *path + [path_name(item.ident)]/~, decl, body, llfndecl, no_self, none, item.id); } else { for vec::each(body.node.stmts) {|stmt| @@ -4952,7 +4957,7 @@ fn trans_item(ccx: @crate_ctxt, item: ast::item) { if tps.len() == 0u { let psubsts = {tys: ty::ty_params_to_tys(ccx.tcx, tps), vtables: none, - bounds: @[]}; + bounds: @[]/~}; trans_class_ctor(ccx, *path, ctor.node.dec, ctor.node.body, get_item_val(ccx, ctor.node.id), psubsts, ctor.node.id, local_def(item.id), ctor.span); @@ -5014,7 +5019,7 @@ fn register_fn_fuller(ccx: @crate_ctxt, sp: span, path: path, llfn } -// Create a _rust_main(args: [str]) function which will be called from the +// Create a _rust_main(args: [str]/~) function which will be called from the // runtime rust_start function fn create_main_wrapper(ccx: @crate_ctxt, sp: span, main_llfn: ValueRef, main_node_type: ty::t) { @@ -5041,19 +5046,21 @@ fn create_main_wrapper(ccx: @crate_ctxt, sp: span, main_llfn: ValueRef, {mode: ast::expl(ast::by_val), ty: ty::mk_vec(ccx.tcx, {ty: unit_ty, mutbl: ast::m_imm})}; let nt = ty::mk_nil(ccx.tcx); - let llfty = type_of_fn(ccx, [vecarg_ty], nt); + let llfty = type_of_fn(ccx, [vecarg_ty]/~, nt); let llfdecl = decl_fn(ccx.llmod, "_rust_main", lib::llvm::CCallConv, llfty); - let fcx = new_fn_ctxt(ccx, [], llfdecl, none); + let fcx = new_fn_ctxt(ccx, []/~, llfdecl, none); let bcx = top_scope_block(fcx, none); let lltop = bcx.llbb; let lloutputarg = llvm::LLVMGetParam(llfdecl, 0 as c_uint); let llenvarg = llvm::LLVMGetParam(llfdecl, 1 as c_uint); - let mut args = [lloutputarg, llenvarg]; - if takes_argv { args += [llvm::LLVMGetParam(llfdecl, 2 as c_uint)]; } + let mut args = [lloutputarg, llenvarg]/~; + if takes_argv { + args += [llvm::LLVMGetParam(llfdecl, 2 as c_uint)]/~; + } Call(bcx, main_llfn, args); build_return(bcx); @@ -5067,7 +5074,7 @@ fn create_main_wrapper(ccx: @crate_ctxt, sp: span, main_llfn: ValueRef, fn main_name() -> str { ret "WinMain@16"; } #[cfg(unix)] fn main_name() -> str { ret "main"; } - let llfty = T_fn([ccx.int_type, ccx.int_type], ccx.int_type); + let llfty = T_fn([ccx.int_type, ccx.int_type]/~, ccx.int_type); let llfn = decl_cdecl_fn(ccx.llmod, main_name(), llfty); let llbb = str::as_c_str("top", {|buf| llvm::LLVMAppendBasicBlock(llfn, buf) @@ -5076,11 +5083,11 @@ fn create_main_wrapper(ccx: @crate_ctxt, sp: span, main_llfn: ValueRef, llvm::LLVMPositionBuilderAtEnd(bld, llbb); let crate_map = ccx.crate_map; let start_ty = T_fn([val_ty(rust_main), ccx.int_type, ccx.int_type, - val_ty(crate_map)], ccx.int_type); + val_ty(crate_map)]/~, ccx.int_type); let start = decl_cdecl_fn(ccx.llmod, "rust_start", start_ty); let args = [rust_main, llvm::LLVMGetParam(llfn, 0 as c_uint), - 
llvm::LLVMGetParam(llfn, 1 as c_uint), crate_map]; + llvm::LLVMGetParam(llfn, 1 as c_uint), crate_map]/~; let result = unsafe { llvm::LLVMBuildCall(bld, start, vec::unsafe::to_ptr(args), args.len() as c_uint, noname()) @@ -5102,9 +5109,9 @@ fn create_real_fn_pair(cx: block, llfnty: TypeRef, llfn: ValueRef, fn fill_fn_pair(bcx: block, pair: ValueRef, llfn: ValueRef, llenvptr: ValueRef) { let ccx = bcx.ccx(); - let code_cell = GEPi(bcx, pair, [0u, abi::fn_field_code]); + let code_cell = GEPi(bcx, pair, [0u, abi::fn_field_code]/~); Store(bcx, llfn, code_cell); - let env_cell = GEPi(bcx, pair, [0u, abi::fn_field_box]); + let env_cell = GEPi(bcx, pair, [0u, abi::fn_field_box]/~); let llenvblobptr = PointerCast(bcx, llenvptr, T_opaque_box_ptr(ccx)); Store(bcx, llenvblobptr, env_cell); } @@ -5112,7 +5119,7 @@ fn fill_fn_pair(bcx: block, pair: ValueRef, llfn: ValueRef, fn item_path(ccx: @crate_ctxt, i: @ast::item) -> path { *alt check ccx.tcx.items.get(i.id) { ast_map::node_item(_, p) { p } - } + [path_name(i.ident)] + } + [path_name(i.ident)]/~ } /* If there's already a symbol for the dtor with and substs , @@ -5124,7 +5131,7 @@ fn get_dtor_symbol(ccx: @crate_ctxt, path: path, id: ast::node_id, some(s) { s } none if is_none(substs) { let s = mangle_exported_name(ccx, - path + [path_name(@ccx.names("dtor"))], + path + [path_name(@ccx.names("dtor"))]/~, t); ccx.item_symbols.insert(id, s); s @@ -5136,7 +5143,7 @@ fn get_dtor_symbol(ccx: @crate_ctxt, path: path, id: ast::node_id, some(ss) { let mono_ty = ty::subst_tps(ccx.tcx, ss.tys, t); mangle_exported_name(ccx, path + - [path_name(@ccx.names("dtor"))], mono_ty) + [path_name(@ccx.names("dtor"))]/~, mono_ty) } none { ccx.sess.bug(#fmt("get_dtor_symbol: not monomorphizing and \ @@ -5155,7 +5162,7 @@ fn get_item_val(ccx: @crate_ctxt, id: ast::node_id) -> ValueRef { let mut exprt = false; let val = alt check ccx.tcx.items.get(id) { ast_map::node_item(i, pth) { - let my_path = *pth + [path_name(i.ident)]; + let my_path = *pth + [path_name(i.ident)]/~; alt check i.node { ast::item_const(_, _) { let typ = ty::node_id_to_type(ccx.tcx, i.id); @@ -5181,17 +5188,17 @@ fn get_item_val(ccx: @crate_ctxt, id: ast::node_id) -> ValueRef { exprt = true; let mty = ty::node_id_to_type(ccx.tcx, id); let pth = *pth + [path_name(@ccx.names("meth")), - path_name(m.ident)]; + path_name(m.ident)]/~; let llfn = register_fn_full(ccx, m.span, pth, id, mty); set_inline_hint_if_appr(m.attrs, llfn); llfn } ast_map::node_native_item(ni, _, pth) { exprt = true; - register_fn(ccx, ni.span, *pth + [path_name(ni.ident)], ni.id) + register_fn(ccx, ni.span, *pth + [path_name(ni.ident)]/~, ni.id) } ast_map::node_ctor(nm, tps, ctor, _, pt) { - let my_path = *pt + [path_name(nm)]; + let my_path = *pt + [path_name(nm)]/~; register_fn(ccx, ctor.span, my_path, ctor.node.id) } ast_map::node_dtor(tps, dt, parent_id, pt) { @@ -5205,7 +5212,7 @@ fn get_item_val(ccx: @crate_ctxt, id: ast::node_id) -> ValueRef { // This code shouldn't be reached if the class is generic assert !ty::type_has_params(class_ty); let lldty = T_fn([T_ptr(type_of(ccx, ty::mk_nil(tcx))), - T_ptr(type_of(ccx, class_ty))], + T_ptr(type_of(ccx, class_ty))]/~, llvm::LLVMVoidType()); let s = get_dtor_symbol(ccx, *pt, dt.node.id, none); @@ -5217,7 +5224,7 @@ fn get_item_val(ccx: @crate_ctxt, id: ast::node_id) -> ValueRef { ast_map::node_variant(v, enm, pth) { assert v.node.args.len() != 0u; - let pth = *pth + [path_name(enm.ident), path_name(v.node.name)]; + let pth = *pth + [path_name(enm.ident), path_name(v.node.name)]/~; let 
llfn = alt check enm.node { ast::item_enum(_, _, _) { register_fn(ccx, v.span, pth, id) @@ -5247,7 +5254,7 @@ fn trans_constant(ccx: @crate_ctxt, it: @ast::item) { let path = item_path(ccx, it); for vec::each(variants) {|variant| let p = path + [path_name(variant.node.name), - path_name(@"discrim")]; + path_name(@"discrim")]/~; let s = mangle_exported_name(ccx, p, ty::mk_int(ccx.tcx)); let disr_val = vi[i].disr_val; note_unique_llvm_symbol(ccx, s); @@ -5283,22 +5290,24 @@ fn p2i(ccx: @crate_ctxt, v: ValueRef) -> ValueRef { } fn declare_intrinsics(llmod: ModuleRef) -> hashmap { - let T_memmove32_args: [TypeRef] = - [T_ptr(T_i8()), T_ptr(T_i8()), T_i32(), T_i32(), T_i1()]; - let T_memmove64_args: [TypeRef] = - [T_ptr(T_i8()), T_ptr(T_i8()), T_i64(), T_i32(), T_i1()]; - let T_memset32_args: [TypeRef] = - [T_ptr(T_i8()), T_i8(), T_i32(), T_i32(), T_i1()]; - let T_memset64_args: [TypeRef] = - [T_ptr(T_i8()), T_i8(), T_i64(), T_i32(), T_i1()]; - let T_trap_args: [TypeRef] = []; - let T_frameaddress_args: [TypeRef] = [T_i32()]; + let T_memmove32_args: [TypeRef]/~ = + [T_ptr(T_i8()), T_ptr(T_i8()), T_i32(), T_i32(), T_i1()]/~; + let T_memmove64_args: [TypeRef]/~ = + [T_ptr(T_i8()), T_ptr(T_i8()), T_i64(), T_i32(), T_i1()]/~; + let T_memset32_args: [TypeRef]/~ = + [T_ptr(T_i8()), T_i8(), T_i32(), T_i32(), T_i1()]/~; + let T_memset64_args: [TypeRef]/~ = + [T_ptr(T_i8()), T_i8(), T_i64(), T_i32(), T_i1()]/~; + let T_trap_args: [TypeRef]/~ = []/~; + let T_frameaddress_args: [TypeRef]/~ = [T_i32()]/~; let gcroot = decl_cdecl_fn(llmod, "llvm.gcroot", - T_fn([T_ptr(T_ptr(T_i8())), T_ptr(T_i8())], T_void())); + T_fn([T_ptr(T_ptr(T_i8())), T_ptr(T_i8())]/~, + T_void())); let gcread = decl_cdecl_fn(llmod, "llvm.gcread", - T_fn([T_ptr(T_i8()), T_ptr(T_ptr(T_i8()))], T_void())); + T_fn([T_ptr(T_i8()), T_ptr(T_ptr(T_i8()))]/~, + T_void())); let memmove32 = decl_cdecl_fn(llmod, "llvm.memmove.p0i8.p0i8.i32", T_fn(T_memmove32_args, T_void())); @@ -5311,7 +5320,8 @@ fn declare_intrinsics(llmod: ModuleRef) -> hashmap { let memset64 = decl_cdecl_fn(llmod, "llvm.memset.p0i8.i64", T_fn(T_memset64_args, T_void())); - let trap = decl_cdecl_fn(llmod, "llvm.trap", T_fn(T_trap_args, T_void())); + let trap = decl_cdecl_fn(llmod, "llvm.trap", T_fn(T_trap_args, + T_void())); let frameaddress = decl_cdecl_fn(llmod, "llvm.frameaddress", T_fn(T_frameaddress_args, T_ptr(T_i8()))); @@ -5331,16 +5341,17 @@ fn declare_dbg_intrinsics(llmod: ModuleRef, intrinsics: hashmap) { let declare = decl_cdecl_fn(llmod, "llvm.dbg.declare", - T_fn([T_metadata(), T_metadata()], T_void())); + T_fn([T_metadata(), T_metadata()]/~, T_void())); let value = decl_cdecl_fn(llmod, "llvm.dbg.value", - T_fn([T_metadata(), T_i64(), T_metadata()], T_void())); + T_fn([T_metadata(), T_i64(), T_metadata()]/~, + T_void())); intrinsics.insert("llvm.dbg.declare", declare); intrinsics.insert("llvm.dbg.value", value); } fn trap(bcx: block) { - let v: [ValueRef] = []; + let v: [ValueRef]/~ = []/~; alt bcx.ccx().intrinsics.find("llvm.trap") { some(x) { Call(bcx, x, v); } _ { bcx.sess().bug("unbound llvm.trap in trap"); } @@ -5348,20 +5359,20 @@ fn trap(bcx: block) { } fn create_module_map(ccx: @crate_ctxt) -> ValueRef { - let elttype = T_struct([ccx.int_type, ccx.int_type]); + let elttype = T_struct([ccx.int_type, ccx.int_type]/~); let maptype = T_array(elttype, ccx.module_data.size() + 1u); let map = str::as_c_str("_rust_mod_map", {|buf| llvm::LLVMAddGlobal(ccx.llmod, maptype, buf) }); lib::llvm::SetLinkage(map, lib::llvm::InternalLinkage); - let mut elts: [ValueRef] = 
[]; + let mut elts: [ValueRef]/~ = []/~; for ccx.module_data.each {|key, val| let elt = C_struct([p2i(ccx, C_cstr(ccx, key)), - p2i(ccx, val)]); - elts += [elt]; + p2i(ccx, val)]/~); + elts += [elt]/~; }; - let term = C_struct([C_int(ccx, 0), C_int(ccx, 0)]); - elts += [term]; + let term = C_struct([C_int(ccx, 0), C_int(ccx, 0)]/~); + elts += [term]/~; llvm::LLVMSetInitializer(map, C_array(elttype, elts)); ret map; } @@ -5379,7 +5390,7 @@ fn decl_crate_map(sess: session::session, mapmeta: link_meta, } else { "toplevel" }; let sym_name = "_rust_crate_map_" + mapname; let arrtype = T_array(int_type, n_subcrates as uint); - let maptype = T_struct([int_type, arrtype]); + let maptype = T_struct([int_type, arrtype]/~); let map = str::as_c_str(sym_name, {|buf| llvm::LLVMAddGlobal(llmod, maptype, buf) }); @@ -5388,7 +5399,7 @@ fn decl_crate_map(sess: session::session, mapmeta: link_meta, } fn fill_crate_map(ccx: @crate_ctxt, map: ValueRef) { - let mut subcrates: [ValueRef] = []; + let mut subcrates: [ValueRef]/~ = []/~; let mut i = 1; let cstore = ccx.sess.cstore; while cstore::have_crate_data(cstore, i) { @@ -5399,13 +5410,13 @@ fn fill_crate_map(ccx: @crate_ctxt, map: ValueRef) { let cr = str::as_c_str(nm, {|buf| llvm::LLVMAddGlobal(ccx.llmod, ccx.int_type, buf) }); - subcrates += [p2i(ccx, cr)]; + subcrates += [p2i(ccx, cr)]/~; i += 1; } - subcrates += [C_int(ccx, 0)]; + subcrates += [C_int(ccx, 0)]/~; llvm::LLVMSetInitializer(map, C_struct( [p2i(ccx, create_module_map(ccx)), - C_array(ccx.int_type, subcrates)])); + C_array(ccx.int_type, subcrates)]/~)); } fn crate_ctxt_to_encode_parms(cx: @crate_ctxt) @@ -5427,8 +5438,8 @@ fn crate_ctxt_to_encode_parms(cx: @crate_ctxt) encode_inlined_item: encode_inlined_item }; - fn reexports(cx: @crate_ctxt) -> [(str, ast::def_id)] { - let mut reexports = []; + fn reexports(cx: @crate_ctxt) -> [(str, ast::def_id)]/~ { + let mut reexports = []/~; for cx.exp_map.each {|exp_id, defs| for defs.each {|def| if !def.reexp { cont; } @@ -5437,14 +5448,14 @@ fn crate_ctxt_to_encode_parms(cx: @crate_ctxt) ast_map::path_to_str(*path) } }; - reexports += [(path, def.id)]; + reexports += [(path, def.id)]/~; } } ret reexports; } fn impl_map(cx: @crate_ctxt, - id: ast::node_id) -> [(ast::ident, ast::def_id)] { + id: ast::node_id) -> [(ast::ident, ast::def_id)]/~ { alt *cx.maps.impl_map.get(id) { list::cons(impls, @list::nil) { (*impls).map {|i| @@ -5463,7 +5474,7 @@ fn write_metadata(cx: @crate_ctxt, crate: @ast::crate) { if !cx.sess.building_library { ret; } let encode_parms = crate_ctxt_to_encode_parms(cx); let llmeta = C_bytes(encoder::encode_metadata(encode_parms, crate)); - let llconst = C_struct([llmeta]); + let llconst = C_struct([llmeta]/~); let mut llglobal = str::as_c_str("rust_metadata", {|buf| llvm::LLVMAddGlobal(cx.llmod, val_ty(llconst), buf) }); @@ -5479,7 +5490,7 @@ fn write_metadata(cx: @crate_ctxt, crate: @ast::crate) { llvm::LLVMAddGlobal(cx.llmod, T_array(t_ptr_i8, 1u), buf) }); lib::llvm::SetLinkage(llvm_used, lib::llvm::AppendingLinkage); - llvm::LLVMSetInitializer(llvm_used, C_array(t_ptr_i8, [llglobal])); + llvm::LLVMSetInitializer(llvm_used, C_array(t_ptr_i8, [llglobal]/~)); } // Writes the current ABI version into the crate. 
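As the remaining base.rs hunk and the common.rs hunks further down show, the suffix is added in type positions as well: record fields, `option<[T]>` parameters, and boxed vectors written with `@[...]` all take the `/~` form. A hedged sketch of that shape (the record name and fields here are hypothetical, not taken from this patch):

    // Before:
    //   type caches = {mut names: [str], uses: option<[uint]>, bounds: @[u8]};
    // After: each vector type carries the /~ suffix.
    type caches = {mut names: [str]/~,
                   uses: option<[uint]/~>,
                   bounds: @[u8]/~};
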
@@ -5577,9 +5588,9 @@ fn trans_crate(sess: session::session, crate: @ast::crate, tcx: ty::ctxt, mut n_glues_created: 0u, mut n_null_glues: 0u, mut n_real_glues: 0u, - llvm_insn_ctxt: @mut [], + llvm_insn_ctxt: @mut []/~, llvm_insns: str_hash(), - fn_times: @mut []}, + fn_times: @mut []/~}, upcalls: upcall::declare_upcalls(targ_cfg, tn, tydesc_type, llmod), diff --git a/src/rustc/middle/trans/build.rs b/src/rustc/middle/trans/build.rs index 6932dbda0bb..ca18bd25289 100644 --- a/src/rustc/middle/trans/build.rs +++ b/src/rustc/middle/trans/build.rs @@ -79,7 +79,7 @@ fn Ret(cx: block, V: ValueRef) { llvm::LLVMBuildRet(B(cx), V); } -fn AggregateRet(cx: block, RetVals: [ValueRef]) { +fn AggregateRet(cx: block, RetVals: [ValueRef]/~) { if cx.unreachable { ret; } assert (!cx.terminated); cx.terminated = true; @@ -134,7 +134,7 @@ fn noname() -> *libc::c_char unsafe { ret unsafe::reinterpret_cast(ptr::addr_of(cnull)); } -fn Invoke(cx: block, Fn: ValueRef, Args: [ValueRef], +fn Invoke(cx: block, Fn: ValueRef, Args: [ValueRef]/~, Then: BasicBlockRef, Catch: BasicBlockRef) { if cx.unreachable { ret; } assert (!cx.terminated); @@ -151,7 +151,7 @@ fn Invoke(cx: block, Fn: ValueRef, Args: [ValueRef], } } -fn FastInvoke(cx: block, Fn: ValueRef, Args: [ValueRef], +fn FastInvoke(cx: block, Fn: ValueRef, Args: [ValueRef]/~, Then: BasicBlockRef, Catch: BasicBlockRef) { if cx.unreachable { ret; } assert (!cx.terminated); @@ -417,7 +417,7 @@ fn Store(cx: block, Val: ValueRef, Ptr: ValueRef) { llvm::LLVMBuildStore(B(cx), Val, Ptr); } -fn GEP(cx: block, Pointer: ValueRef, Indices: [ValueRef]) -> ValueRef { +fn GEP(cx: block, Pointer: ValueRef, Indices: [ValueRef]/~) -> ValueRef { if cx.unreachable { ret llvm::LLVMGetUndef(T_ptr(T_nil())); } unsafe { count_insn(cx, "gep"); @@ -428,14 +428,14 @@ fn GEP(cx: block, Pointer: ValueRef, Indices: [ValueRef]) -> ValueRef { // Simple wrapper around GEP that takes an array of ints and wraps them // in C_i32() -fn GEPi(cx: block, base: ValueRef, ixs: [uint]) -> ValueRef { - let mut v: [ValueRef] = []; - for vec::each(ixs) {|i| v += [C_i32(i as i32)]; } +fn GEPi(cx: block, base: ValueRef, ixs: [uint]/~) -> ValueRef { + let mut v: [ValueRef]/~ = []/~; + for vec::each(ixs) {|i| v += [C_i32(i as i32)]/~; } count_insn(cx, "gepi"); ret InBoundsGEP(cx, base, v); } -fn InBoundsGEP(cx: block, Pointer: ValueRef, Indices: [ValueRef]) -> +fn InBoundsGEP(cx: block, Pointer: ValueRef, Indices: [ValueRef]/~) -> ValueRef { if cx.unreachable { ret llvm::LLVMGetUndef(T_ptr(T_nil())); } unsafe { @@ -607,7 +607,7 @@ fn EmptyPhi(cx: block, Ty: TypeRef) -> ValueRef { ret llvm::LLVMBuildPhi(B(cx), Ty, noname()); } -fn Phi(cx: block, Ty: TypeRef, vals: [ValueRef], bbs: [BasicBlockRef]) +fn Phi(cx: block, Ty: TypeRef, vals: [ValueRef]/~, bbs: [BasicBlockRef]/~) -> ValueRef { if cx.unreachable { ret llvm::LLVMGetUndef(Ty); } assert vals.len() == bbs.len(); @@ -657,15 +657,15 @@ fn add_comment(bcx: block, text: str) { let asm = str::as_c_str(comment_text, {|c| str::as_c_str("", {|e| count_insn(bcx, "inlineasm"); - llvm::LLVMConstInlineAsm(T_fn([], T_void()), c, e, + llvm::LLVMConstInlineAsm(T_fn([]/~, T_void()), c, e, False, False) }) }); - Call(bcx, asm, []); + Call(bcx, asm, []/~); } } -fn Call(cx: block, Fn: ValueRef, Args: [ValueRef]) -> ValueRef { +fn Call(cx: block, Fn: ValueRef, Args: [ValueRef]/~) -> ValueRef { if cx.unreachable { ret _UndefReturn(cx, Fn); } unsafe { count_insn(cx, "call"); @@ -679,7 +679,7 @@ fn Call(cx: block, Fn: ValueRef, Args: [ValueRef]) -> ValueRef { } } -fn FastCall(cx: 
block, Fn: ValueRef, Args: [ValueRef]) -> ValueRef { +fn FastCall(cx: block, Fn: ValueRef, Args: [ValueRef]/~) -> ValueRef { if cx.unreachable { ret _UndefReturn(cx, Fn); } unsafe { count_insn(cx, "fastcall"); @@ -690,7 +690,7 @@ fn FastCall(cx: block, Fn: ValueRef, Args: [ValueRef]) -> ValueRef { } } -fn CallWithConv(cx: block, Fn: ValueRef, Args: [ValueRef], +fn CallWithConv(cx: block, Fn: ValueRef, Args: [ValueRef]/~, Conv: CallConv) -> ValueRef { if cx.unreachable { ret _UndefReturn(cx, Fn); } unsafe { @@ -779,7 +779,7 @@ fn Trap(cx: block) { llvm::LLVMGetNamedFunction(M, buf) }); assert (T as int != 0); - let Args: [ValueRef] = []; + let Args: [ValueRef]/~ = []/~; unsafe { count_insn(cx, "trap"); llvm::LLVMBuildCall(b, T, vec::unsafe::to_ptr(Args), diff --git a/src/rustc/middle/trans/closure.rs b/src/rustc/middle/trans/closure.rs index 745c25f6f79..51a1bba1162 100644 --- a/src/rustc/middle/trans/closure.rs +++ b/src/rustc/middle/trans/closure.rs @@ -122,9 +122,9 @@ fn mk_tuplified_uniq_cbox_ty(tcx: ty::ctxt, cdata_ty: ty::t) -> ty::t { // Given a closure ty, emits a corresponding tuple ty fn mk_closure_tys(tcx: ty::ctxt, - bound_values: [environment_value]) - -> (ty::t, [ty::t]) { - let mut bound_tys = []; + bound_values: [environment_value]/~) + -> (ty::t, [ty::t]/~) { + let mut bound_tys = []/~; // Compute the closed over data for vec::each(bound_values) {|bv| @@ -133,12 +133,12 @@ fn mk_closure_tys(tcx: ty::ctxt, env_move(_, t, _) { t } env_ref(_, t, _) { t } env_expr(_, t) { t } - }]; + }]/~; } let bound_data_ty = ty::mk_tup(tcx, bound_tys); - // FIXME[mono] remove tuple of tydescs from closure types (#2531) - let cdata_ty = ty::mk_tup(tcx, [ty::mk_tup(tcx, []), - bound_data_ty]); + // FIXME[mono]/~ remove tuple of tydescs from closure types (#2531) + let cdata_ty = ty::mk_tup(tcx, [ty::mk_tup(tcx, []/~), + bound_data_ty]/~); #debug["cdata_ty=%s", ty_to_str(tcx, cdata_ty)]; ret (cdata_ty, bound_tys); } @@ -146,7 +146,7 @@ fn mk_closure_tys(tcx: ty::ctxt, fn allocate_cbox(bcx: block, ck: ty::closure_kind, cdata_ty: ty::t) - -> (block, ValueRef, [ValueRef]) { + -> (block, ValueRef, [ValueRef]/~) { let _icx = bcx.insn_ctxt("closure::allocate_cbox"); let ccx = bcx.ccx(), tcx = ccx.tcx; @@ -155,7 +155,7 @@ fn allocate_cbox(bcx: block, // Initialize ref count to arbitrary value for debugging: let ccx = bcx.ccx(); let box = PointerCast(bcx, box, T_opaque_box_ptr(ccx)); - let ref_cnt = GEPi(bcx, box, [0u, abi::box_field_refcnt]); + let ref_cnt = GEPi(bcx, box, [0u, abi::box_field_refcnt]/~); let rc = C_int(ccx, 0x12345678); Store(bcx, rc, ref_cnt); } @@ -164,7 +164,7 @@ fn allocate_cbox(bcx: block, cdata_ty: ty::t, box: ValueRef, &ti: option<@tydesc_info>) -> block { - let bound_tydesc = GEPi(bcx, box, [0u, abi::box_field_tydesc]); + let bound_tydesc = GEPi(bcx, box, [0u, abi::box_field_tydesc]/~); let td = base::get_tydesc(bcx.ccx(), cdata_ty, ti); Store(bcx, td, bound_tydesc); bcx @@ -172,7 +172,7 @@ fn allocate_cbox(bcx: block, // Allocate and initialize the box: let mut ti = none; - let mut temp_cleanups = []; + let mut temp_cleanups = []/~; let (bcx, box) = alt ck { ty::ck_box { get_tydesc(ccx, cdata_ty, ti); @@ -209,7 +209,7 @@ type closure_result = { // heap allocated closure that copies the upvars into environment. // Otherwise, it is stack allocated and copies pointers to the upvars. 
fn store_environment(bcx: block, - bound_values: [environment_value], + bound_values: [environment_value]/~, ck: ty::closure_kind) -> closure_result { let _icx = bcx.insn_ctxt("closure::store_environment"); let ccx = bcx.ccx(), tcx = ccx.tcx; @@ -242,12 +242,12 @@ fn store_environment(bcx: block, } let bound_data = GEPi(bcx, llbox, - [0u, abi::box_field_body, abi::closure_body_bindings, i]); + [0u, abi::box_field_body, abi::closure_body_bindings, i]/~); alt bv { env_expr(e, _) { bcx = base::trans_expr_save_in(bcx, e, bound_data); add_clean_temp_mem(bcx, bound_data, bound_tys[i]); - temp_cleanups += [bound_data]; + temp_cleanups += [bound_data]/~; } env_copy(val, ty, owned) { let val1 = load_if_immediate(bcx, val, ty); @@ -283,13 +283,13 @@ fn store_environment(bcx: block, // Given a context and a list of upvars, build a closure. This just // collects the upvars and packages them up for store_environment. fn build_closure(bcx0: block, - cap_vars: [capture::capture_var], + cap_vars: [capture::capture_var]/~, ck: ty::closure_kind, id: ast::node_id, include_ret_handle: option) -> closure_result { let _icx = bcx0.insn_ctxt("closure::build_closure"); // If we need to, package up the iterator body to call - let mut env_vals = []; + let mut env_vals = []/~; let mut bcx = bcx0; let ccx = bcx.ccx(), tcx = ccx.tcx; @@ -303,18 +303,18 @@ fn build_closure(bcx0: block, capture::cap_ref { assert ck == ty::ck_block; ty = ty::mk_mut_ptr(tcx, ty); - env_vals += [env_ref(lv.val, ty, lv.kind)]; + env_vals += [env_ref(lv.val, ty, lv.kind)]/~; } capture::cap_copy { let mv = alt check ccx.maps.last_use_map.find(id) { none { false } some(vars) { (*vars).contains(nid) } }; - if mv { env_vals += [env_move(lv.val, ty, lv.kind)]; } - else { env_vals += [env_copy(lv.val, ty, lv.kind)]; } + if mv { env_vals += [env_move(lv.val, ty, lv.kind)]/~; } + else { env_vals += [env_copy(lv.val, ty, lv.kind)]/~; } } capture::cap_move { - env_vals += [env_move(lv.val, ty, lv.kind)]; + env_vals += [env_move(lv.val, ty, lv.kind)]/~; } capture::cap_drop { assert lv.kind == owned; @@ -331,7 +331,7 @@ fn build_closure(bcx0: block, let nil_ret = PointerCast(bcx, our_ret, T_ptr(T_nil())); env_vals += [env_ref(flagptr, ty::mk_mut_ptr(tcx, ty::mk_bool(tcx)), owned), - env_ref(nil_ret, ty::mk_nil_ptr(tcx), owned)]; + env_ref(nil_ret, ty::mk_nil_ptr(tcx), owned)]/~; } ret store_environment(bcx, env_vals, ck); } @@ -341,7 +341,7 @@ fn build_closure(bcx0: block, // with the upvars and type descriptors. 
fn load_environment(fcx: fn_ctxt, cdata_ty: ty::t, - cap_vars: [capture::capture_var], + cap_vars: [capture::capture_var]/~, load_ret_handle: bool, ck: ty::closure_kind) { let _icx = fcx.insn_ctxt("closure::load_environment"); @@ -357,7 +357,7 @@ fn load_environment(fcx: fn_ctxt, capture::cap_drop { /* ignore */ } _ { let mut upvarptr = - GEPi(bcx, llcdata, [0u, abi::closure_body_bindings, i]); + GEPi(bcx, llcdata, [0u, abi::closure_body_bindings, i]/~); alt ck { ty::ck_block { upvarptr = Load(bcx, upvarptr); } ty::ck_uniq | ty::ck_box { } @@ -370,9 +370,10 @@ fn load_environment(fcx: fn_ctxt, } if load_ret_handle { let flagptr = Load(bcx, GEPi(bcx, llcdata, - [0u, abi::closure_body_bindings, i])); - let retptr = Load(bcx, GEPi(bcx, llcdata, - [0u, abi::closure_body_bindings, i+1u])); + [0u, abi::closure_body_bindings, i]/~)); + let retptr = Load(bcx, + GEPi(bcx, llcdata, + [0u, abi::closure_body_bindings, i+1u]/~)); fcx.loop_ret = some({flagptr: flagptr, retptr: retptr}); } } @@ -390,7 +391,7 @@ fn trans_expr_fn(bcx: block, let ccx = bcx.ccx(), bcx = bcx; let fty = node_id_type(bcx, id); let llfnty = type_of_fn_from_ty(ccx, fty); - let sub_path = bcx.fcx.path + [path_name(@"anon")]; + let sub_path = bcx.fcx.path + [path_name(@"anon")]/~; let s = mangle_internal_name_by_path(ccx, sub_path); let llfn = decl_internal_cdecl_fn(ccx.llmod, s, llfnty); @@ -428,13 +429,13 @@ fn trans_expr_fn(bcx: block, fn trans_bind_1(cx: block, outgoing_fty: ty::t, f_res: lval_maybe_callee, - args: [option<@ast::expr>], pair_ty: ty::t, + args: [option<@ast::expr>]/~, pair_ty: ty::t, dest: dest) -> block { let _icx = cx.insn_ctxt("closure::trans_bind1"); let ccx = cx.ccx(); - let mut bound: [@ast::expr] = []; + let mut bound: [@ast::expr]/~ = []/~; for vec::each(args) {|argopt| - alt argopt { none { } some(e) { bound += [e]; } } + alt argopt { none { } some(e) { bound += [e]/~; } } } let mut bcx = f_res.bcx; if dest == ignore { @@ -453,22 +454,22 @@ fn trans_bind_1(cx: block, outgoing_fty: ty::t, // Arrange for the bound function to live in the first binding spot // if the function is not statically known. let (env_vals, target_info) = alt f_res.env { - null_env { ([], target_static(f_res.val)) } + null_env { ([]/~, target_static(f_res.val)) } is_closure { // Cast the function we are binding to be the type that the // closure will expect it to have. The type the closure knows // about has the type parameters substituted with the real types. 
let llclosurety = T_ptr(type_of(ccx, outgoing_fty)); let src_loc = PointerCast(bcx, f_res.val, llclosurety); - ([env_copy(src_loc, pair_ty, owned)], target_closure) + ([env_copy(src_loc, pair_ty, owned)]/~, target_closure) } self_env(slf, slf_t, none) { - ([env_copy(slf, slf_t, owned)], target_static_self(f_res.val)) + ([env_copy(slf, slf_t, owned)]/~, target_static_self(f_res.val)) } self_env(_, slf_t, some(slf)) { let cast = PointerCast(bcx, f_res.val, T_ptr(T_nil())); ([env_copy(cast, ty::mk_nil_ptr(ccx.tcx), owned_imm), - env_copy(slf, slf_t, owned_imm)], target_self) + env_copy(slf, slf_t, owned_imm)]/~, target_self) } }; @@ -498,7 +499,7 @@ fn make_fn_glue( let tcx = cx.tcx(); let fn_env = fn@(ck: ty::closure_kind) -> block { - let box_cell_v = GEPi(cx, v, [0u, abi::fn_field_box]); + let box_cell_v = GEPi(cx, v, [0u, abi::fn_field_box]/~); let box_ptr_v = Load(cx, box_cell_v); with_cond(cx, IsNotNull(cx, box_ptr_v)) {|bcx| let closure_ty = ty::mk_opaque_closure_ptr(tcx, ck); @@ -536,28 +537,28 @@ fn make_opaque_cbox_take_glue( with_cond(bcx, IsNotNull(bcx, cbox_in)) {|bcx| // Load the size from the type descr found in the cbox let cbox_in = PointerCast(bcx, cbox_in, llopaquecboxty); - let tydescptr = GEPi(bcx, cbox_in, [0u, abi::box_field_tydesc]); + let tydescptr = GEPi(bcx, cbox_in, [0u, abi::box_field_tydesc]/~); let tydesc = Load(bcx, tydescptr); let tydesc = PointerCast(bcx, tydesc, T_ptr(ccx.tydesc_type)); - let sz = Load(bcx, GEPi(bcx, tydesc, [0u, abi::tydesc_field_size])); + let sz = Load(bcx, GEPi(bcx, tydesc, [0u, abi::tydesc_field_size]/~)); // Adjust sz to account for the rust_opaque_box header fields let sz = Add(bcx, sz, shape::llsize_of(ccx, T_box_header(ccx))); // Allocate memory, update original ptr, and copy existing data let malloc = ccx.upcalls.exchange_malloc; - let cbox_out = Call(bcx, malloc, [tydesc, sz]); + let cbox_out = Call(bcx, malloc, [tydesc, sz]/~); let cbox_out = PointerCast(bcx, cbox_out, llopaquecboxty); call_memmove(bcx, cbox_out, cbox_in, sz); Store(bcx, cbox_out, cboxptr); // Take the (deeply cloned) type descriptor - let tydesc_out = GEPi(bcx, cbox_out, [0u, abi::box_field_tydesc]); + let tydesc_out = GEPi(bcx, cbox_out, [0u, abi::box_field_tydesc]/~); let bcx = take_ty(bcx, tydesc_out, ty::mk_type(tcx)); // Take the data in the tuple let ti = none; - let cdata_out = GEPi(bcx, cbox_out, [0u, abi::box_field_body]); + let cdata_out = GEPi(bcx, cbox_out, [0u, abi::box_field_body]/~); call_tydesc_glue_full(bcx, cdata_out, tydesc, abi::tydesc_field_take_glue, ti); bcx @@ -599,13 +600,13 @@ fn make_opaque_cbox_free_glue( // Load the type descr found in the cbox let lltydescty = T_ptr(ccx.tydesc_type); let cbox = PointerCast(bcx, cbox, T_opaque_cbox_ptr(ccx)); - let tydescptr = GEPi(bcx, cbox, [0u, abi::box_field_tydesc]); + let tydescptr = GEPi(bcx, cbox, [0u, abi::box_field_tydesc]/~); let tydesc = Load(bcx, tydescptr); let tydesc = PointerCast(bcx, tydesc, lltydescty); // Drop the tuple data then free the descriptor let ti = none; - let cdata = GEPi(bcx, cbox, [0u, abi::box_field_body]); + let cdata = GEPi(bcx, cbox, [0u, abi::box_field_body]/~); call_tydesc_glue_full(bcx, cdata, tydesc, abi::tydesc_field_drop_glue, ti); @@ -634,14 +635,14 @@ fn trans_bind_thunk(ccx: @crate_ctxt, path: path, incoming_fty: ty::t, outgoing_fty: ty::t, - args: [option<@ast::expr>], + args: [option<@ast::expr>]/~, cdata_ty: ty::t, target_info: target_info) -> {val: ValueRef, ty: TypeRef} { let _icx = ccx.insn_ctxt("closure::trans_bind_thunk"); let tcx = ccx.tcx; 
#debug["trans_bind_thunk[incoming_fty=%s,outgoing_fty=%s,\ - cdata_ty=%s]", + cdata_ty=%s]/~", ty_to_str(tcx, incoming_fty), ty_to_str(tcx, outgoing_fty), ty_to_str(tcx, cdata_ty)]; @@ -701,22 +702,25 @@ fn trans_bind_thunk(ccx: @crate_ctxt, (fptr, llvm::LLVMGetUndef(T_opaque_cbox_ptr(ccx)), 0u) } target_closure { - let pair = GEPi(bcx, llcdata, [0u, abi::closure_body_bindings, 0u]); + let pair = GEPi(bcx, llcdata, [0u, abi::closure_body_bindings, 0u]/~); let lltargetenv = - Load(bcx, GEPi(bcx, pair, [0u, abi::fn_field_box])); + Load(bcx, GEPi(bcx, pair, [0u, abi::fn_field_box]/~)); let lltargetfn = Load - (bcx, GEPi(bcx, pair, [0u, abi::fn_field_code])); + (bcx, GEPi(bcx, pair, [0u, abi::fn_field_code]/~)); (lltargetfn, lltargetenv, 1u) } target_self { let fptr = Load(bcx, GEPi(bcx, llcdata, - [0u, abi::closure_body_bindings, 0u])); - let slfbox = GEPi(bcx, llcdata, [0u, abi::closure_body_bindings, 1u]); - let selfptr = GEPi(bcx, Load(bcx, slfbox), [0u, abi::box_field_body]); + [0u, abi::closure_body_bindings, 0u]/~)); + let slfbox = + GEPi(bcx, llcdata, [0u, abi::closure_body_bindings, 1u]/~); + let selfptr = + GEPi(bcx, Load(bcx, slfbox), [0u, abi::box_field_body]/~); (fptr, PointerCast(bcx, selfptr, T_opaque_cbox_ptr(ccx)), 2u) } target_static_self(fptr) { - let slfptr = GEPi(bcx, llcdata, [0u, abi::closure_body_bindings, 0u]); + let slfptr = + GEPi(bcx, llcdata, [0u, abi::closure_body_bindings, 0u]/~); (fptr, PointerCast(bcx, slfptr, T_opaque_cbox_ptr(ccx)), 1u) } }; @@ -728,7 +732,7 @@ fn trans_bind_thunk(ccx: @crate_ctxt, let outgoing_args = ty::ty_fn_args(outgoing_fty); // Set up the three implicit arguments to the thunk. - let mut llargs: [ValueRef] = [fcx.llretptr, lltargetenv]; + let mut llargs: [ValueRef]/~ = [fcx.llretptr, lltargetenv]/~; let mut a: uint = first_real_arg; // retptr, env come first let mut b: uint = starting_idx; @@ -740,7 +744,7 @@ fn trans_bind_thunk(ccx: @crate_ctxt, // closure. some(e) { let mut val = - GEPi(bcx, llcdata, [0u, abi::closure_body_bindings, b]); + GEPi(bcx, llcdata, [0u, abi::closure_body_bindings, b]/~); alt ty::resolved_mode(tcx, out_arg.mode) { ast::by_val { @@ -754,13 +758,13 @@ fn trans_bind_thunk(ccx: @crate_ctxt, } ast::by_ref | ast::by_mutbl_ref | ast::by_move { } } - llargs += [val]; + llargs += [val]/~; b += 1u; } // Arg will be provided when the thunk is invoked. 
none { - llargs += [llvm::LLVMGetParam(llthunk, a as c_uint)]; + llargs += [llvm::LLVMGetParam(llthunk, a as c_uint)]/~; a += 1u; } } diff --git a/src/rustc/middle/trans/common.rs b/src/rustc/middle/trans/common.rs index 8c0abd5166e..109b533b007 100644 --- a/src/rustc/middle/trans/common.rs +++ b/src/rustc/middle/trans/common.rs @@ -58,9 +58,9 @@ type stats = mut n_glues_created: uint, mut n_null_glues: uint, mut n_real_glues: uint, - llvm_insn_ctxt: @mut [str], + llvm_insn_ctxt: @mut [str]/~, llvm_insns: hashmap, - fn_times: @mut [{ident: str, time: int}]}; + fn_times: @mut [{ident: str, time: int}]/~}; class BuilderRef_res { let B: BuilderRef; @@ -92,7 +92,7 @@ type crate_ctxt = { monomorphized: hashmap, monomorphizing: hashmap, // Cache computed type parameter uses (see type_use.rs) - type_use_cache: hashmap, + type_use_cache: hashmap, // Cache generated vtables vtables: hashmap, // Cache of constant strings, @@ -129,9 +129,9 @@ type val_self_pair = {v: ValueRef, t: ty::t}; enum local_val { local_mem(ValueRef), local_imm(ValueRef), } -type param_substs = {tys: [ty::t], +type param_substs = {tys: [ty::t]/~, vtables: option, - bounds: @[ty::param_bounds]}; + bounds: @[ty::param_bounds]/~}; // Function context. Every LLVM function we create will have one of // these. @@ -217,7 +217,7 @@ type cleanup_path = {target: option, dest: BasicBlockRef}; fn scope_clean_changed(info: scope_info) { - if info.cleanup_paths.len() > 0u { info.cleanup_paths = []; } + if info.cleanup_paths.len() > 0u { info.cleanup_paths = []/~; } info.landing_pad = none; } @@ -237,7 +237,7 @@ fn add_clean(cx: block, val: ValueRef, ty: ty::t) { let cleanup_type = cleanup_type(cx.tcx(), ty); in_scope_cx(cx) {|info| info.cleanups += [clean({|a|base::drop_ty(a, val, ty)}, - cleanup_type)]; + cleanup_type)]/~; scope_clean_changed(info); } } @@ -257,7 +257,7 @@ fn add_clean_temp(cx: block, val: ValueRef, ty: ty::t) { } in_scope_cx(cx) {|info| info.cleanups += [clean_temp(val, {|a|do_drop(a, val, ty)}, - cleanup_type)]; + cleanup_type)]/~; scope_clean_changed(info); } } @@ -269,7 +269,7 @@ fn add_clean_temp_mem(cx: block, val: ValueRef, ty: ty::t) { let cleanup_type = cleanup_type(cx.tcx(), ty); in_scope_cx(cx) {|info| info.cleanups += [clean_temp(val, {|a|base::drop_ty(a, val, ty)}, - cleanup_type)]; + cleanup_type)]/~; scope_clean_changed(info); } } @@ -278,7 +278,7 @@ fn add_clean_free(cx: block, ptr: ValueRef, shared: bool) { else { {|a|base::trans_free(a, ptr)} }; in_scope_cx(cx) {|info| info.cleanups += [clean_temp(ptr, free_fn, - normal_exit_and_unwind)]; + normal_exit_and_unwind)]/~; scope_clean_changed(info); } } @@ -318,10 +318,10 @@ type scope_info = { // A list of functions that must be run at when leaving this // block, cleaning up any variables that were introduced in the // block. - mut cleanups: [cleanup], + mut cleanups: [cleanup]/~, // Existing cleanup paths that may be reused, indexed by destination and // cleared when the set of cleanups changes. - mut cleanup_paths: [cleanup_path], + mut cleanup_paths: [cleanup_path]/~, // Unwinding landing pad. Also cleared when cleanups change. 
mut landing_pad: option, }; @@ -551,21 +551,21 @@ fn T_size_t(targ_cfg: @session::config) -> TypeRef { ret T_int(targ_cfg); } -fn T_fn(inputs: [TypeRef], output: TypeRef) -> TypeRef unsafe { +fn T_fn(inputs: [TypeRef]/~, output: TypeRef) -> TypeRef unsafe { ret llvm::LLVMFunctionType(output, to_ptr(inputs), inputs.len() as c_uint, False); } fn T_fn_pair(cx: @crate_ctxt, tfn: TypeRef) -> TypeRef { - ret T_struct([T_ptr(tfn), T_opaque_cbox_ptr(cx)]); + ret T_struct([T_ptr(tfn), T_opaque_cbox_ptr(cx)]/~); } fn T_ptr(t: TypeRef) -> TypeRef { ret llvm::LLVMPointerType(t, 0u as c_uint); } -fn T_struct(elts: [TypeRef]) -> TypeRef unsafe { +fn T_struct(elts: [TypeRef]/~) -> TypeRef unsafe { ret llvm::LLVMStructType(to_ptr(elts), elts.len() as c_uint, False); } @@ -574,12 +574,12 @@ fn T_named_struct(name: str) -> TypeRef { ret str::as_c_str(name, {|buf| llvm::LLVMStructCreateNamed(c, buf) }); } -fn set_struct_body(t: TypeRef, elts: [TypeRef]) unsafe { +fn set_struct_body(t: TypeRef, elts: [TypeRef]/~) unsafe { llvm::LLVMStructSetBody(t, to_ptr(elts), elts.len() as c_uint, False); } -fn T_empty_struct() -> TypeRef { ret T_struct([]); } +fn T_empty_struct() -> TypeRef { ret T_struct([]/~); } // A vtable is, in reality, a vtable pointer followed by zero or more pointers // to tydescs and other vtables that it closes over. But the types and number @@ -604,7 +604,7 @@ fn T_task(targ_cfg: @session::config) -> TypeRef { let t_int = T_int(targ_cfg); let elems = [t_int, t_int, t_int, t_int, - t_int, t_int, t_int, t_int]; + t_int, t_int, t_int, t_int]/~; set_struct_body(t, elems); ret t; } @@ -612,7 +612,7 @@ fn T_task(targ_cfg: @session::config) -> TypeRef { fn T_tydesc_field(cx: @crate_ctxt, field: uint) -> TypeRef unsafe { // Bit of a kludge: pick the fn typeref out of the tydesc.. 
- let tydesc_elts: [TypeRef] = + let tydesc_elts: [TypeRef]/~ = vec::from_elem::(abi::n_tydesc_fields, T_nil()); llvm::LLVMGetStructElementTypes(cx.tydesc_type, @@ -635,14 +635,14 @@ fn T_tydesc(targ_cfg: @session::config) -> TypeRef { let pvoid = T_ptr(T_i8()); let glue_fn_ty = T_ptr(T_fn([T_ptr(T_nil()), T_ptr(T_nil()), tydescpp, - pvoid], T_void())); + pvoid]/~, T_void())); let int_type = T_int(targ_cfg); let elems = [tydescpp, int_type, int_type, glue_fn_ty, glue_fn_ty, glue_fn_ty, glue_fn_ty, int_type, int_type, int_type, int_type, - T_ptr(T_i8()), T_ptr(T_i8()), int_type, int_type]; + T_ptr(T_i8()), T_ptr(T_i8()), int_type, int_type]/~; set_struct_body(tydesc, elems); ret tydesc; } @@ -655,7 +655,7 @@ fn T_array(t: TypeRef, n: uint) -> TypeRef { fn T_vec2(targ_cfg: @session::config, t: TypeRef) -> TypeRef { ret T_struct([T_int(targ_cfg), // fill T_int(targ_cfg), // alloc - T_array(t, 0u)]); // elements + T_array(t, 0u)]/~); // elements } fn T_vec(ccx: @crate_ctxt, t: TypeRef) -> TypeRef { @@ -674,12 +674,12 @@ fn tuplify_box_ty(tcx: ty::ctxt, t: ty::t) -> ty::t { let ptr = ty::mk_ptr(tcx, {ty: ty::mk_nil(tcx), mutbl: ast::m_imm}); ret ty::mk_tup(tcx, [ty::mk_uint(tcx), ty::mk_type(tcx), ptr, ptr, - t]); + t]/~); } -fn T_box_header_fields(cx: @crate_ctxt) -> [TypeRef] { +fn T_box_header_fields(cx: @crate_ctxt) -> [TypeRef]/~ { let ptr = T_ptr(T_i8()); - ret [cx.int_type, T_ptr(cx.tydesc_type), ptr, ptr]; + ret [cx.int_type, T_ptr(cx.tydesc_type), ptr, ptr]/~; } fn T_box_header(cx: @crate_ctxt) -> TypeRef { @@ -687,7 +687,7 @@ fn T_box_header(cx: @crate_ctxt) -> TypeRef { } fn T_box(cx: @crate_ctxt, t: TypeRef) -> TypeRef { - ret T_struct(T_box_header_fields(cx) + [t]); + ret T_struct(T_box_header_fields(cx) + [t]/~); } fn T_box_ptr(t: TypeRef) -> TypeRef { @@ -704,7 +704,7 @@ fn T_opaque_box_ptr(cx: @crate_ctxt) -> TypeRef { } fn T_unique(cx: @crate_ctxt, t: TypeRef) -> TypeRef { - ret T_struct(T_box_header_fields(cx) + [t]); + ret T_struct(T_box_header_fields(cx) + [t]/~); } fn T_unique_ptr(t: TypeRef) -> TypeRef { @@ -713,12 +713,12 @@ fn T_unique_ptr(t: TypeRef) -> TypeRef { } fn T_port(cx: @crate_ctxt, _t: TypeRef) -> TypeRef { - ret T_struct([cx.int_type]); // Refcount + ret T_struct([cx.int_type]/~); // Refcount } fn T_chan(cx: @crate_ctxt, _t: TypeRef) -> TypeRef { - ret T_struct([cx.int_type]); // Refcount + ret T_struct([cx.int_type]/~); // Refcount } @@ -749,7 +749,7 @@ fn T_enum_discrim(cx: @crate_ctxt) -> TypeRef { fn T_opaque_enum(cx: @crate_ctxt) -> TypeRef { let s = "opaque_enum"; alt name_has_type(cx.tn, s) { some(t) { ret t; } _ {} } - let t = T_struct([T_enum_discrim(cx), T_i8()]); + let t = T_struct([T_enum_discrim(cx), T_i8()]/~); associate_type(cx.tn, s, t); ret t; } @@ -763,7 +763,7 @@ fn T_captured_tydescs(cx: @crate_ctxt, n: uint) -> TypeRef { } fn T_opaque_iface(cx: @crate_ctxt) -> TypeRef { - T_struct([T_ptr(cx.tydesc_type), T_opaque_box_ptr(cx)]) + T_struct([T_ptr(cx.tydesc_type), T_opaque_box_ptr(cx)]/~) } fn T_opaque_port_ptr() -> TypeRef { ret T_ptr(T_i8()); } @@ -836,7 +836,7 @@ fn C_cstr(cx: @crate_ctxt, s: str) -> ValueRef { fn C_estr_slice(cx: @crate_ctxt, s: str) -> ValueRef { let cs = llvm::LLVMConstPointerCast(C_cstr(cx, s), T_ptr(T_i8())); - C_struct([cs, C_uint(cx, str::len(s) + 1u /* +1 for null */)]) + C_struct([cs, C_uint(cx, str::len(s) + 1u /* +1 for null */)]/~) } // Returns a Plain Old LLVM String: @@ -848,34 +848,34 @@ fn C_postr(s: str) -> ValueRef { fn C_zero_byte_arr(size: uint) -> ValueRef unsafe { let mut i = 0u; - let mut elts: 
[ValueRef] = []; - while i < size { elts += [C_u8(0u)]; i += 1u; } + let mut elts: [ValueRef]/~ = []/~; + while i < size { elts += [C_u8(0u)]/~; i += 1u; } ret llvm::LLVMConstArray(T_i8(), vec::unsafe::to_ptr(elts), elts.len() as c_uint); } -fn C_struct(elts: [ValueRef]) -> ValueRef unsafe { +fn C_struct(elts: [ValueRef]/~) -> ValueRef unsafe { ret llvm::LLVMConstStruct(vec::unsafe::to_ptr(elts), elts.len() as c_uint, False); } -fn C_named_struct(T: TypeRef, elts: [ValueRef]) -> ValueRef unsafe { +fn C_named_struct(T: TypeRef, elts: [ValueRef]/~) -> ValueRef unsafe { ret llvm::LLVMConstNamedStruct(T, vec::unsafe::to_ptr(elts), elts.len() as c_uint); } -fn C_array(ty: TypeRef, elts: [ValueRef]) -> ValueRef unsafe { +fn C_array(ty: TypeRef, elts: [ValueRef]/~) -> ValueRef unsafe { ret llvm::LLVMConstArray(ty, vec::unsafe::to_ptr(elts), elts.len() as c_uint); } -fn C_bytes(bytes: [u8]) -> ValueRef unsafe { +fn C_bytes(bytes: [u8]/~) -> ValueRef unsafe { ret llvm::LLVMConstString( unsafe::reinterpret_cast(vec::unsafe::to_ptr(bytes)), bytes.len() as c_uint, False); } -fn C_shape(ccx: @crate_ctxt, bytes: [u8]) -> ValueRef { +fn C_shape(ccx: @crate_ctxt, bytes: [u8]/~) -> ValueRef { let llshape = C_bytes(bytes); let llglobal = str::as_c_str(ccx.names("shape"), {|buf| llvm::LLVMAddGlobal(ccx.llmod, val_ty(llshape), buf) @@ -892,11 +892,11 @@ fn get_param(fndecl: ValueRef, param: uint) -> ValueRef { // Used to identify cached monomorphized functions and vtables enum mono_param_id { - mono_precise(ty::t, option<[mono_id]>), + mono_precise(ty::t, option<[mono_id]/~>), mono_any, mono_repr(uint /* size */, uint /* align */), } -type mono_id = @{def: ast::def_id, params: [mono_param_id]}; +type mono_id = @{def: ast::def_id, params: [mono_param_id]/~}; fn hash_mono_id(&&mi: mono_id) -> uint { let mut h = syntax::ast_util::hash_def(mi.def); for vec::each(mi.params) {|param| @@ -954,7 +954,7 @@ fn node_id_type(bcx: block, id: ast::node_id) -> ty::t { fn expr_ty(bcx: block, ex: @ast::expr) -> ty::t { node_id_type(bcx, ex.id) } -fn node_id_type_params(bcx: block, id: ast::node_id) -> [ty::t] { +fn node_id_type_params(bcx: block, id: ast::node_id) -> [ty::t]/~ { let tcx = bcx.tcx(); let params = ty::node_id_to_type_params(tcx, id); alt bcx.fcx.param_substs { @@ -966,7 +966,7 @@ fn node_id_type_params(bcx: block, id: ast::node_id) -> [ty::t] { } fn field_idx_strict(cx: ty::ctxt, sp: span, ident: ast::ident, - fields: [ty::field]) + fields: [ty::field]/~) -> uint { alt ty::field_idx(ident, fields) { none { cx.sess.span_bug(sp, #fmt("base expr doesn't appear to \ @@ -975,7 +975,7 @@ fn field_idx_strict(cx: ty::ctxt, sp: span, ident: ast::ident, } } -fn dummy_substs(tps: [ty::t]) -> ty::substs { +fn dummy_substs(tps: [ty::t]/~) -> ty::substs { {self_r: some(ty::re_bound(ty::br_self)), self_ty: none, tps: tps} diff --git a/src/rustc/middle/trans/debuginfo.rs b/src/rustc/middle/trans/debuginfo.rs index 48a087d0c27..219658cebb9 100644 --- a/src/rustc/middle/trans/debuginfo.rs +++ b/src/rustc/middle/trans/debuginfo.rs @@ -64,7 +64,7 @@ fn lli64(val: int) -> ValueRef { fn lli1(bval: bool) -> ValueRef { C_bool(bval) } -fn llmdnode(elems: [ValueRef]) -> ValueRef unsafe { +fn llmdnode(elems: [ValueRef]/~) -> ValueRef unsafe { llvm::LLVMMDNode(vec::unsafe::to_ptr(elems), vec::len(elems) as libc::c_uint) } @@ -99,9 +99,9 @@ fn update_cache(cache: metadata_cache, mdtag: int, val: debug_metadata) { let existing = if cache.contains_key(mdtag) { cache.get(mdtag) } else { - [] + []/~ }; - cache.insert(mdtag, existing + [val]); 
+ cache.insert(mdtag, existing + [val]/~); } type metadata = {node: ValueRef, data: T}; @@ -115,7 +115,7 @@ type block_md = {start: codemap::loc, end: codemap::loc}; type argument_md = {id: ast::node_id}; type retval_md = {id: ast::node_id}; -type metadata_cache = hashmap; +type metadata_cache = hashmap; enum debug_metadata { file_metadata(@metadata), @@ -183,7 +183,7 @@ fn create_compile_unit(cx: @crate_ctxt) lli1(cx.sess.opts.optimize != 0u), llstr(""), // flags (???) lli32(0) // runtime version (???) - ]; + ]/~; let unit_node = llmdnode(unit_metadata); add_named_metadata(cx, "llvm.dbg.cu", unit_node); let mdval = @{node: unit_node, data: {name: crate_name}}; @@ -220,7 +220,7 @@ fn create_file(cx: @crate_ctxt, full_path: str) -> @metadata { let file_md = [lltag(tg), llstr(file_path), llstr(work_dir), - unit_node]; + unit_node]/~; let val = llmdnode(file_md); let mdval = @{node: val, data: {path: full_path}}; update_cache(cache, tg, file_metadata(mdval)); @@ -268,7 +268,7 @@ fn create_block(cx: block) -> @metadata { lli32(start.col as int), file_node.node, lli32(unique_id) - ]; + ]/~; let val = llmdnode(lldata); let mdval = @{node: val, data: {start: start, end: end}}; //update_cache(cache, tg, block_metadata(mdval)); @@ -328,7 +328,7 @@ fn create_basic_type(cx: @crate_ctxt, t: ty::t, ty: ast::prim_ty, span: span) lli64(align * 8), // alignment in bits lli64(0), //XXX offset? lli32(0), //XXX flags? - lli32(encoding)]; + lli32(encoding)]/~; let llnode = llmdnode(lldata); let mdval = @{node: llnode, data: {hash: ty::type_id(t)}}; update_cache(cache, tg, tydesc_metadata(mdval)); @@ -362,7 +362,7 @@ type struct_ctxt = { file: ValueRef, name: str, line: int, - mut members: [ValueRef], + mut members: [ValueRef]/~, mut total_size: int, align: int }; @@ -378,7 +378,7 @@ fn create_structure(file: @metadata, name: str, line: int) let cx = @{file: file.node, name: name, line: line, - mut members: [], + mut members: []/~, mut total_size: 0, align: 64 //XXX different alignment per arch? 
}; @@ -397,7 +397,7 @@ fn create_derived_type(type_tag: int, file: ValueRef, name: str, line: int, lli64(align), lli64(offset), lli32(0), - ty]; + ty]/~; ret llmdnode(lldata); } @@ -405,11 +405,11 @@ fn add_member(cx: @struct_ctxt, name: str, line: int, size: int, align: int, ty: ValueRef) { cx.members += [create_derived_type(MemberTag, cx.file, name, line, size * 8, align * 8, cx.total_size, - ty)]; + ty)]/~; cx.total_size += size * 8; } -fn create_record(cx: @crate_ctxt, t: ty::t, fields: [ast::ty_field], +fn create_record(cx: @crate_ctxt, t: ty::t, fields: [ast::ty_field]/~, span: span) -> @metadata { let fname = filename_from_span(cx, span); let file_node = create_file(cx, fname); @@ -461,7 +461,7 @@ fn create_boxed_type(cx: @crate_ctxt, outer: ty::t, _inner: ty::t, fn create_composite_type(type_tag: int, name: str, file: ValueRef, line: int, size: int, align: int, offset: int, derived: option, - members: option<[ValueRef]>) + members: option<[ValueRef]/~>) -> ValueRef { let lldata = [lltag(type_tag), file, @@ -484,7 +484,7 @@ fn create_composite_type(type_tag: int, name: str, file: ValueRef, line: int, }, lli32(0), // runtime language llnull() - ]; + ]/~; ret llmdnode(lldata); } @@ -501,12 +501,12 @@ fn create_vec(cx: @crate_ctxt, vec_t: ty::t, elem_t: ty::t, sys::min_align_of::() as int, size_t_type.node); add_member(scx, "alloc", 0, sys::size_of::() as int, sys::min_align_of::() as int, size_t_type.node); - let subrange = llmdnode([lltag(SubrangeTag), lli64(0), lli64(0)]); + let subrange = llmdnode([lltag(SubrangeTag), lli64(0), lli64(0)]/~); let (arr_size, arr_align) = size_and_align_of(cx, elem_t); let data_ptr = create_composite_type(ArrayTypeTag, "", file_node.node, 0, arr_size, arr_align, 0, option::some(elem_ty_md.node), - option::some([subrange])); + option::some([subrange]/~)); add_member(scx, "data", 0, 0, // clang says the size should be 0 sys::min_align_of::() as int, data_ptr); let llnode = finish_structure(scx); @@ -548,12 +548,12 @@ fn create_ty(_cx: @crate_ctxt, _t: ty::t, _ty: @ast::ty) ty::ty_uniq(mt) { ast::ty_uniq({ty: t_to_ty(cx, mt.ty, span), mutbl: mt.mutbl}) } ty::ty_rec(fields) { - let fs = []; + let fs = []/~; for field in fields { fs += [{node: {ident: field.ident, mt: {ty: t_to_ty(cx, field.mt.ty, span), mutbl: field.mt.mutbl}}, - span: span}]; + span: span}]/~; } ast::ty_rec(fs) } @@ -630,7 +630,7 @@ fn create_var(type_tag: int, context: ValueRef, name: str, file: ValueRef, lli32(line), ret_ty, lli32(0) - ]; + ]/~; ret llmdnode(lldata); } @@ -678,7 +678,7 @@ fn create_local_var(bcx: block, local: @ast::local) } } }; - let declargs = [llmdnode([llptr]), mdnode]; + let declargs = [llmdnode([llptr]/~), mdnode]/~; trans::build::Call(bcx, cx.intrinsics.get("llvm.dbg.declare"), declargs); ret mdval; @@ -709,7 +709,7 @@ fn create_arg(bcx: block, arg: ast::arg, sp: span) let llptr = alt fcx.llargs.get(arg.id) { local_mem(v) | local_imm(v) { v } }; - let declargs = [llmdnode([llptr]), mdnode]; + let declargs = [llmdnode([llptr]/~), mdnode]/~; trans::build::Call(bcx, cx.intrinsics.get("llvm.dbg.declare"), declargs); ret mdval; @@ -725,7 +725,7 @@ fn update_source_pos(cx: block, s: span) { let scopedata = [lli32(loc.line as int), lli32(loc.col as int), blockmd.node, - llnull()]; + llnull()]/~; let dbgscope = llmdnode(scopedata); llvm::LLVMSetCurrentDebugLocation(trans::build::B(cx), dbgscope); } @@ -796,7 +796,7 @@ fn create_function(fcx: fn_ctxt) -> @metadata { }; let sub_node = create_composite_type(SubroutineTag, "", file_node, 0, 0, 0, 0, option::none, - 
option::some([ty_node])); + option::some([ty_node]/~)); let fn_metadata = [lltag(SubprogramTag), llunused(), @@ -818,7 +818,7 @@ fn create_function(fcx: fn_ctxt) -> @metadata { //list of template params //func decl descriptor //list of func vars - ]; + ]/~; let val = llmdnode(fn_metadata); add_named_metadata(cx, "llvm.dbg.sp", val); let mdval = @{node: val, data: {id: id}}; diff --git a/src/rustc/middle/trans/impl.rs b/src/rustc/middle/trans/impl.rs index 514a7372660..b38fa05f465 100644 --- a/src/rustc/middle/trans/impl.rs +++ b/src/rustc/middle/trans/impl.rs @@ -16,14 +16,14 @@ import lib::llvm::llvm::LLVMGetParam; import std::map::hashmap; fn trans_impl(ccx: @crate_ctxt, path: path, name: ast::ident, - methods: [@ast::method], tps: [ast::ty_param]) { + methods: [@ast::method]/~, tps: [ast::ty_param]/~) { let _icx = ccx.insn_ctxt("impl::trans_impl"); if tps.len() > 0u { ret; } - let sub_path = path + [path_name(name)]; + let sub_path = path + [path_name(name)]/~; for vec::each(methods) {|m| if m.tps.len() == 0u { let llfn = get_item_val(ccx, m.id); - trans_fn(ccx, sub_path + [path_name(m.ident)], m.decl, m.body, + trans_fn(ccx, sub_path + [path_name(m.ident)]/~, m.decl, m.body, llfn, impl_self(ty::node_id_to_type(ccx.tcx, m.self_id)), none, m.id); } @@ -34,14 +34,14 @@ fn trans_self_arg(bcx: block, base: @ast::expr, derefs: uint) -> result { let _icx = bcx.insn_ctxt("impl::trans_self_arg"); let basety = expr_ty(bcx, base); let m_by_ref = ast::expl(ast::by_ref); - let mut temp_cleanups = []; + let mut temp_cleanups = []/~; let result = trans_arg_expr(bcx, {mode: m_by_ref, ty: basety}, T_ptr(type_of::type_of(bcx.ccx(), basety)), base, temp_cleanups, none, derefs); // by-ref self argument should not require cleanup in the case of // other arguments failing: - assert temp_cleanups == []; + assert temp_cleanups == []/~; ret result; } @@ -73,7 +73,8 @@ fn trans_method_callee(bcx: block, callee_id: ast::node_id, } } -fn method_from_methods(ms: [@ast::method], name: ast::ident) -> ast::def_id { +fn method_from_methods(ms: [@ast::method]/~, name: ast::ident) + -> ast::def_id { local_def(option::get(vec::find(ms, {|m| m.ident == name})).id) } @@ -147,16 +148,16 @@ fn trans_iface_callee(bcx: block, val: ValueRef, -> lval_maybe_callee { let _icx = bcx.insn_ctxt("impl::trans_iface_callee"); let ccx = bcx.ccx(); - let vtable = Load(bcx, PointerCast(bcx, GEPi(bcx, val, [0u, 0u]), + let vtable = Load(bcx, PointerCast(bcx, GEPi(bcx, val, [0u, 0u]/~), T_ptr(T_ptr(T_vtable())))); - let box = Load(bcx, GEPi(bcx, val, [0u, 1u])); - // FIXME[impl] I doubt this is alignment-safe (#2534) - let self = GEPi(bcx, box, [0u, abi::box_field_body]); + let box = Load(bcx, GEPi(bcx, val, [0u, 1u]/~)); + // FIXME[impl]/~ I doubt this is alignment-safe (#2534) + let self = GEPi(bcx, box, [0u, abi::box_field_body]/~); let env = self_env(self, ty::mk_opaque_box(bcx.tcx()), some(box)); let llfty = type_of::type_of_fn_from_ty(ccx, callee_ty); let vtable = PointerCast(bcx, vtable, T_ptr(T_array(T_ptr(llfty), n_method + 1u))); - let mptr = Load(bcx, GEPi(bcx, vtable, [0u, n_method])); + let mptr = Load(bcx, GEPi(bcx, vtable, [0u, n_method]/~)); {bcx: bcx, val: mptr, kind: owned, env: env} } @@ -234,7 +235,7 @@ fn get_vtable(ccx: @crate_ctxt, origin: typeck::vtable_origin) } } -fn make_vtable(ccx: @crate_ctxt, ptrs: [ValueRef]) -> ValueRef { +fn make_vtable(ccx: @crate_ctxt, ptrs: [ValueRef]/~) -> ValueRef { let _icx = ccx.insn_ctxt("impl::make_vtable"); let tbl = C_struct(ptrs); let vt_gvar = str::as_c_str(ccx.names("vtable"), 
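The impl.rs hunks here handle interface ("iface") objects, the ancestor of today's trait objects: trans_iface_callee reads a two-word value whose slot 0 is a vtable pointer and slot 1 a pointer to the boxed receiver, then loads the n-th method out of the vtable, and make_vtable builds that vtable as a constant struct of function pointers. A hand-rolled sketch of the same layout follows; the type and field names are illustrative, and this is not how current rustc represents `dyn Trait`.

```rust
// Hand-rolled sketch of the two-word iface object that trans_iface_callee
// indexes: slot 0 = vtable, slot 1 = boxed data. Names are illustrative.
struct Dog { name: String }

struct Vtable {
    // One entry per method, like make_vtable's C_struct of function pointers.
    speak: fn(&Dog) -> String,
}

fn dog_speak(d: &Dog) -> String { format!("{} says woof", d.name) }

static DOG_VTABLE: Vtable = Vtable { speak: dog_speak };

struct IfaceObj<'a> {
    vtable: &'a Vtable, // what the patch loads via GEPi(.., [0u, 0u]/~)
    data: &'a Dog,      // what it loads via GEPi(.., [0u, 1u]/~)
}

fn main() {
    let rex = Dog { name: "Rex".to_string() };
    let obj = IfaceObj { vtable: &DOG_VTABLE, data: &rex };
    // Dispatch: index the vtable, pass the data pointer as the receiver.
    println!("{}", (obj.vtable.speak)(obj.data));
}
```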
{|buf| @@ -246,7 +247,7 @@ fn make_vtable(ccx: @crate_ctxt, ptrs: [ValueRef]) -> ValueRef { vt_gvar } -fn make_impl_vtable(ccx: @crate_ctxt, impl_id: ast::def_id, substs: [ty::t], +fn make_impl_vtable(ccx: @crate_ctxt, impl_id: ast::def_id, substs: [ty::t]/~, vtables: typeck::vtable_res) -> ValueRef { let _icx = ccx.insn_ctxt("impl::make_impl_vtable"); let tcx = ccx.tcx; @@ -288,12 +289,12 @@ fn trans_cast(bcx: block, val: @ast::expr, id: ast::node_id, dest: dest) let bcx = trans_expr_save_in(bcx, val, body); revoke_clean(bcx, box); let result = get_dest_addr(dest); - Store(bcx, box, PointerCast(bcx, GEPi(bcx, result, [0u, 1u]), + Store(bcx, box, PointerCast(bcx, GEPi(bcx, result, [0u, 1u]/~), T_ptr(val_ty(box)))); let orig = ccx.maps.vtable_map.get(id)[0]; let orig = resolve_vtable_in_fn_ctxt(bcx.fcx, orig); let vtable = get_vtable(bcx.ccx(), orig); - Store(bcx, vtable, PointerCast(bcx, GEPi(bcx, result, [0u, 0u]), + Store(bcx, vtable, PointerCast(bcx, GEPi(bcx, result, [0u, 0u]/~), T_ptr(val_ty(vtable)))); bcx } diff --git a/src/rustc/middle/trans/native.rs b/src/rustc/middle/trans/native.rs index fcc7f370f74..da687c8a3c8 100644 --- a/src/rustc/middle/trans/native.rs +++ b/src/rustc/middle/trans/native.rs @@ -44,7 +44,7 @@ fn is_sse(++c: x86_64_reg_class) -> bool { }; } -fn is_ymm(cls: [x86_64_reg_class]) -> bool { +fn is_ymm(cls: [x86_64_reg_class]/~) -> bool { let len = vec::len(cls); ret (len > 2u && is_sse(cls[0]) && @@ -56,13 +56,13 @@ fn is_ymm(cls: [x86_64_reg_class]) -> bool { cls[3] == sseup_class); } -fn classify_ty(ty: TypeRef) -> [x86_64_reg_class] { +fn classify_ty(ty: TypeRef) -> [x86_64_reg_class]/~ { fn align(off: uint, ty: TypeRef) -> uint { let a = ty_align(ty); ret (off + a - 1u) / a * a; } - fn struct_tys(ty: TypeRef) -> [TypeRef] { + fn struct_tys(ty: TypeRef) -> [TypeRef]/~ { let n = llvm::LLVMCountStructElementTypes(ty); let elts = vec::from_elem(n as uint, ptr::null()); vec::as_buf(elts) {|buf| @@ -119,13 +119,13 @@ fn classify_ty(ty: TypeRef) -> [x86_64_reg_class] { }; } - fn all_mem(cls: [mut x86_64_reg_class]) { + fn all_mem(cls: [mut x86_64_reg_class]/~) { for uint::range(0u, cls.len()) { |i| cls[i] = memory_class; } } - fn unify(cls: [mut x86_64_reg_class], + fn unify(cls: [mut x86_64_reg_class]/~, i: uint, newv: x86_64_reg_class) { if cls[i] == newv { @@ -150,8 +150,8 @@ fn classify_ty(ty: TypeRef) -> [x86_64_reg_class] { } } - fn classify_struct(tys: [TypeRef], - cls: [mut x86_64_reg_class], i: uint, + fn classify_struct(tys: [TypeRef]/~, + cls: [mut x86_64_reg_class]/~, i: uint, off: uint) { if vec::is_empty(tys) { classify(T_i64(), cls, i, off); @@ -166,7 +166,7 @@ fn classify_ty(ty: TypeRef) -> [x86_64_reg_class] { } fn classify(ty: TypeRef, - cls: [mut x86_64_reg_class], ix: uint, + cls: [mut x86_64_reg_class]/~, ix: uint, off: uint) { let t_align = ty_align(ty); let t_size = ty_size(ty); @@ -216,7 +216,7 @@ fn classify_ty(ty: TypeRef) -> [x86_64_reg_class] { } } - fn fixup(ty: TypeRef, cls: [mut x86_64_reg_class]) { + fn fixup(ty: TypeRef, cls: [mut x86_64_reg_class]/~) { let mut i = 0u; let llty = llvm::LLVMGetTypeKind(ty) as int; let e = vec::len(cls); @@ -274,8 +274,8 @@ fn classify_ty(ty: TypeRef) -> [x86_64_reg_class] { ret vec::from_mut(cls); } -fn llreg_ty(cls: [x86_64_reg_class]) -> TypeRef { - fn llvec_len(cls: [x86_64_reg_class]) -> uint { +fn llreg_ty(cls: [x86_64_reg_class]/~) -> TypeRef { + fn llvec_len(cls: [x86_64_reg_class]/~) -> uint { let mut len = 1u; for vec::each(cls) {|c| if c != sseup_class { @@ -286,27 +286,27 @@ fn 
llreg_ty(cls: [x86_64_reg_class]) -> TypeRef { ret len; } - let mut tys = []; + let mut tys = []/~; let mut i = 0u; let e = vec::len(cls); while i < e { alt cls[i] { integer_class { - tys += [T_i64()]; + tys += [T_i64()]/~; } sse_fv_class { let vec_len = llvec_len(vec::tailn(cls, i + 1u)) * 2u; let vec_ty = llvm::LLVMVectorType(T_f32(), vec_len as c_uint); - tys += [vec_ty]; + tys += [vec_ty]/~; i += vec_len; cont; } sse_fs_class { - tys += [T_f32()]; + tys += [T_f32()]/~; } sse_ds_class { - tys += [T_f64()]; + tys += [T_f64()]/~; } _ { fail "llregtype: unhandled class"; @@ -323,13 +323,13 @@ type x86_64_llty = { }; type x86_64_tys = { - arg_tys: [x86_64_llty], + arg_tys: [x86_64_llty]/~, ret_ty: x86_64_llty, - attrs: [option], + attrs: [option]/~, sret: bool }; -fn x86_64_tys(atys: [TypeRef], +fn x86_64_tys(atys: [TypeRef]/~, rty: TypeRef, ret_def: bool) -> x86_64_tys { fn is_reg_ty(ty: TypeRef) -> bool { @@ -342,18 +342,18 @@ fn x86_64_tys(atys: [TypeRef], }; } - fn is_pass_byval(cls: [x86_64_reg_class]) -> bool { + fn is_pass_byval(cls: [x86_64_reg_class]/~) -> bool { ret cls[0] == memory_class || cls[0] == x87_class || cls[0] == complex_x87_class; } - fn is_ret_bysret(cls: [x86_64_reg_class]) -> bool { + fn is_ret_bysret(cls: [x86_64_reg_class]/~) -> bool { ret cls[0] == memory_class; } fn x86_64_ty(ty: TypeRef, - is_mem_cls: fn(cls: [x86_64_reg_class]) -> bool, + is_mem_cls: fn(cls: [x86_64_reg_class]/~) -> bool, attr: Attribute) -> (x86_64_llty, option) { let mut cast = false; let mut ty_attr = option::none; @@ -371,22 +371,22 @@ fn x86_64_tys(atys: [TypeRef], ret ({ cast: cast, ty: llty }, ty_attr); } - let mut arg_tys = []; - let mut attrs = []; + let mut arg_tys = []/~; + let mut attrs = []/~; for vec::each(atys) {|t| let (ty, attr) = x86_64_ty(t, is_pass_byval, ByValAttribute); - arg_tys += [ty]; - attrs += [attr]; + arg_tys += [ty]/~; + attrs += [attr]/~; } let mut (ret_ty, ret_attr) = x86_64_ty(rty, is_ret_bysret, StructRetAttribute); let sret = option::is_some(ret_attr); if sret { - arg_tys = [ret_ty] + arg_tys; + arg_tys = [ret_ty]/~ + arg_tys; ret_ty = { cast: false, ty: T_void() }; - attrs = [ret_attr] + attrs; + attrs = [ret_attr]/~ + attrs; } else if !ret_def { ret_ty = { cast: false, ty: T_void() @@ -427,7 +427,7 @@ fn link_name(i: @ast::native_item) -> str { } type c_stack_tys = { - arg_tys: [TypeRef], + arg_tys: [TypeRef]/~, ret_ty: TypeRef, ret_def: bool, bundle_ty: TypeRef, @@ -436,7 +436,7 @@ type c_stack_tys = { }; fn c_arg_and_ret_lltys(ccx: @crate_ctxt, - id: ast::node_id) -> ([TypeRef], TypeRef, ty::t) { + id: ast::node_id) -> ([TypeRef]/~, TypeRef, ty::t) { alt ty::get(ty::node_id_to_type(ccx.tcx, id)).struct { ty::ty_fn({inputs: arg_tys, output: ret_ty, _}) { let llargtys = type_of_explicit_args(ccx, arg_tys); @@ -450,7 +450,7 @@ fn c_arg_and_ret_lltys(ccx: @crate_ctxt, fn c_stack_tys(ccx: @crate_ctxt, id: ast::node_id) -> @c_stack_tys { let (llargtys, llretty, ret_ty) = c_arg_and_ret_lltys(ccx, id); - let bundle_ty = T_struct(llargtys + [T_ptr(llretty)]); + let bundle_ty = T_struct(llargtys + [T_ptr(llretty)]/~); let ret_def = !ty::type_is_bot(ret_ty) && !ty::type_is_nil(ret_ty); let x86_64 = if ccx.sess.targ_cfg.arch == arch_x86_64 { option::some(x86_64_tys(llargtys, llretty, ret_def)) @@ -462,13 +462,13 @@ fn c_stack_tys(ccx: @crate_ctxt, ret_ty: llretty, ret_def: ret_def, bundle_ty: bundle_ty, - shim_fn_ty: T_fn([T_ptr(bundle_ty)], T_void()), + shim_fn_ty: T_fn([T_ptr(bundle_ty)]/~, T_void()), x86_64_tys: x86_64 }; } type shim_arg_builder = fn(bcx: block, 
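The native.rs changes above run through rustc's copy of the System V x86-64 argument classification: classify_ty assigns each eightbyte of an aggregate a register class (integer, SSE, memory, x87, and so on), llreg_ty rebuilds an LLVM type from the class list, and is_pass_byval / is_ret_bysret check whether the first class forces the value into memory (a byval argument or an sret return). A deliberately simplified sketch of that final check is below; it is not the full classification algorithm, only the decision it feeds.

```rust
// Simplified sketch of the decision is_pass_byval/is_ret_bysret make: an
// aggregate whose first eightbyte classifies as MEMORY or X87 cannot be
// split across registers. Class names mirror the patch's x86_64_reg_class.
enum RegClass { Int, Sse, SseUp, X87, Memory }

fn passed_in_memory(cls: &[RegClass]) -> bool {
    matches!(cls.first(), None | Some(RegClass::Memory) | Some(RegClass::X87))
}

fn main() {
    let pair = [RegClass::Int, RegClass::Sse]; // e.g. struct { i64, f64 }: two registers
    let big = [RegClass::Memory];              // anything classified to memory goes byval/sret
    println!("pair in memory: {}", passed_in_memory(&pair));
    println!("big in memory:  {}", passed_in_memory(&big));
}
```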
tys: @c_stack_tys, - llargbundle: ValueRef) -> [ValueRef]; + llargbundle: ValueRef) -> [ValueRef]/~; type shim_ret_builder = fn(bcx: block, tys: @c_stack_tys, llargbundle: ValueRef, llretval: ValueRef); @@ -485,7 +485,7 @@ fn build_shim_fn_(ccx: @crate_ctxt, ccx.llmod, shim_name, tys.shim_fn_ty); // Declare the body of the shim function: - let fcx = new_fn_ctxt(ccx, [], llshimfn, none); + let fcx = new_fn_ctxt(ccx, []/~, llshimfn, none); let bcx = top_scope_block(fcx, none); let lltop = bcx.llbb; let llargbundle = get_param(llshimfn, 0u); @@ -519,7 +519,7 @@ fn build_wrap_fn_(ccx: @crate_ctxt, ret_builder: wrap_ret_builder) { let _icx = ccx.insn_ctxt("native::build_wrap_fn_"); - let fcx = new_fn_ctxt(ccx, [], llwrapfn, none); + let fcx = new_fn_ctxt(ccx, []/~, llwrapfn, none); let bcx = top_scope_block(fcx, none); let lltop = bcx.llbb; @@ -530,7 +530,7 @@ fn build_wrap_fn_(ccx: @crate_ctxt, // Create call itself. let llshimfnptr = PointerCast(bcx, llshimfn, T_ptr(T_i8())); let llrawargbundle = PointerCast(bcx, llargbundle, T_ptr(T_i8())); - Call(bcx, shim_upcall, [llrawargbundle, llshimfnptr]); + Call(bcx, shim_upcall, [llrawargbundle, llshimfnptr]/~); ret_builder(bcx, tys, llargbundle); tie_up_header_blocks(fcx, lltop); @@ -588,9 +588,9 @@ fn trans_native_mod(ccx: @crate_ctxt, let _icx = ccx.insn_ctxt("native::build_shim_fn"); fn build_args(bcx: block, tys: @c_stack_tys, - llargbundle: ValueRef) -> [ValueRef] { + llargbundle: ValueRef) -> [ValueRef]/~ { let _icx = bcx.insn_ctxt("native::shim::build_args"); - let mut llargvals = []; + let mut llargvals = []/~; let mut i = 0u; let n = vec::len(tys.arg_tys); @@ -599,33 +599,33 @@ fn trans_native_mod(ccx: @crate_ctxt, let mut atys = x86_64.arg_tys; let mut attrs = x86_64.attrs; if x86_64.sret { - let llretptr = GEPi(bcx, llargbundle, [0u, n]); + let llretptr = GEPi(bcx, llargbundle, [0u, n]/~); let llretloc = Load(bcx, llretptr); - llargvals = [llretloc]; + llargvals = [llretloc]/~; atys = vec::tail(atys); attrs = vec::tail(attrs); } while i < n { let llargval = if atys[i].cast { let arg_ptr = GEPi(bcx, llargbundle, - [0u, i]); + [0u, i]/~); let arg_ptr = BitCast(bcx, arg_ptr, T_ptr(atys[i].ty)); Load(bcx, arg_ptr) } else if option::is_some(attrs[i]) { - GEPi(bcx, llargbundle, [0u, i]) + GEPi(bcx, llargbundle, [0u, i]/~) } else { - load_inbounds(bcx, llargbundle, [0u, i]) + load_inbounds(bcx, llargbundle, [0u, i]/~) }; - llargvals += [llargval]; + llargvals += [llargval]/~; i += 1u; } } _ { while i < n { let llargval = load_inbounds(bcx, llargbundle, - [0u, i]); - llargvals += [llargval]; + [0u, i]/~); + llargvals += [llargval]/~; i += 1u; } } @@ -652,7 +652,7 @@ fn trans_native_mod(ccx: @crate_ctxt, ret; } let n = vec::len(tys.arg_tys); - let llretptr = GEPi(bcx, llargbundle, [0u, n]); + let llretptr = GEPi(bcx, llargbundle, [0u, n]/~); let llretloc = Load(bcx, llretptr); if x86_64.ret_ty.cast { let tmp_ptr = BitCast(bcx, llretloc, @@ -666,7 +666,7 @@ fn trans_native_mod(ccx: @crate_ctxt, if tys.ret_def { let n = vec::len(tys.arg_tys); // R** llretptr = &args->r; - let llretptr = GEPi(bcx, llargbundle, [0u, n]); + let llretptr = GEPi(bcx, llargbundle, [0u, n]/~); // R* llretloc = *llretptr; /* (args->r) */ let llretloc = Load(bcx, llretptr); // *args->r = r; @@ -705,7 +705,7 @@ fn trans_native_mod(ccx: @crate_ctxt, fn build_direct_fn(ccx: @crate_ctxt, decl: ValueRef, item: @ast::native_item, tys: @c_stack_tys, cc: lib::llvm::CallConv) { - let fcx = new_fn_ctxt(ccx, [], decl, none); + let fcx = new_fn_ctxt(ccx, []/~, decl, none); let bcx = 
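The shim builders being edited here pack each native call's arguments, plus a slot for the return pointer, into a single stack-allocated "argbundle" struct; build_wrap_fn_ then hands the raw bundle pointer and the shim pointer to a runtime upcall, and the shim unpacks the bundle and performs the real C call on the other stack. A toy sketch of the bundle idea with concrete types is below; the names are illustrative, there is no stack switching, and it is written as Rust source rather than the generated LLVM IR the patch deals in.

```rust
// Toy sketch of the "argbundle" that build_shim_fn_/build_args manipulate:
// every argument plus a return slot packed into one struct, unpacked by a
// shim that takes only the bundle pointer. Names are illustrative.
struct ArgBundle {
    a: i32,
    b: i32,
    ret: *mut i32, // the extra [0u, n] slot the patch indexes with GEPi
}

// Stand-in for the native function the shim ultimately calls.
fn native_add(a: i32, b: i32) -> i32 { a + b }

// The shim: one bundle in, result written back through the return slot.
fn shim(bundle: &mut ArgBundle) {
    let result = native_add(bundle.a, bundle.b);
    unsafe { *bundle.ret = result; }
}

fn main() {
    let mut out = 0;
    let mut bundle = ArgBundle { a: 2, b: 40, ret: &mut out };
    shim(&mut bundle);
    println!("{}", out); // 42
}
```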
top_scope_block(fcx, none), lltop = bcx.llbb; let llbasefn = base_fn(ccx, link_name(item), tys, cc); let ty = ty::lookup_item_type(ccx.tcx, @@ -736,11 +736,11 @@ fn trans_native_mod(ccx: @crate_ctxt, let implicit_args = first_real_arg; // ret + env while i < n { let llargval = get_param(llwrapfn, i + implicit_args); - store_inbounds(bcx, llargval, llargbundle, [0u, i]); + store_inbounds(bcx, llargval, llargbundle, [0u, i]/~); i += 1u; } let llretptr = get_param(llwrapfn, 0u); - store_inbounds(bcx, llretptr, llargbundle, [0u, n]); + store_inbounds(bcx, llretptr, llargbundle, [0u, n]/~); } fn build_ret(bcx: block, _tys: @c_stack_tys, @@ -786,9 +786,9 @@ fn trans_native_mod(ccx: @crate_ctxt, } }; let psubsts = { - tys: [], + tys: []/~, vtables: none, - bounds: @[] + bounds: @[]/~ }; trans_intrinsic(ccx, llwrapfn, native_item, *path, psubsts, none); @@ -894,7 +894,7 @@ fn trans_intrinsic(ccx: @crate_ctxt, decl: ValueRef, item: @ast::native_item, } "frame_address" { let frameaddress = ccx.intrinsics.get("llvm.frameaddress"); - let frameaddress_val = Call(bcx, frameaddress, [C_i32(0i32)]); + let frameaddress_val = Call(bcx, frameaddress, [C_i32(0i32)]/~); let fty = ty::mk_fn(bcx.tcx(), { purity: ast::impure_fn, proto: ast::proto_any, @@ -903,10 +903,10 @@ fn trans_intrinsic(ccx: @crate_ctxt, decl: ValueRef, item: @ast::native_item, ty: ty::mk_imm_ptr( bcx.tcx(), ty::mk_mach_uint(bcx.tcx(), ast::ty_u8)) - }], + }]/~, output: ty::mk_nil(bcx.tcx()), ret_style: ast::return_val, - constraints: [] + constraints: []/~ }); bcx = trans_call_inner(bcx, none, fty, ty::mk_nil(bcx.tcx()), { |bcx| @@ -915,7 +915,7 @@ fn trans_intrinsic(ccx: @crate_ctxt, decl: ValueRef, item: @ast::native_item, get_param(decl, first_real_arg), temporary) }, - arg_vals([frameaddress_val]), ignore); + arg_vals([frameaddress_val]/~), ignore); } } build_return(bcx); @@ -933,7 +933,7 @@ fn trans_crust_fn(ccx: @crate_ctxt, path: ast_map::path, decl: ast::fn_decl, let _icx = ccx.insn_ctxt("native::crust::build_rust_fn"); let t = ty::node_id_to_type(ccx.tcx, id); let ps = link::mangle_internal_name_by_path( - ccx, path + [ast_map::path_name(@"__rust_abi")]); + ccx, path + [ast_map::path_name(@"__rust_abi")]/~); let llty = type_of_fn_from_ty(ccx, t); let llfndecl = decl_internal_cdecl_fn(ccx.llmod, ps, llty); trans_fn(ccx, path, decl, body, llfndecl, no_self, none, id); @@ -946,18 +946,18 @@ fn trans_crust_fn(ccx: @crate_ctxt, path: ast_map::path, decl: ast::fn_decl, let _icx = ccx.insn_ctxt("native::crust::build_shim_fn"); fn build_args(bcx: block, tys: @c_stack_tys, - llargbundle: ValueRef) -> [ValueRef] { + llargbundle: ValueRef) -> [ValueRef]/~ { let _icx = bcx.insn_ctxt("native::crust::shim::build_args"); - let mut llargvals = []; + let mut llargvals = []/~; let mut i = 0u; let n = vec::len(tys.arg_tys); - let llretptr = load_inbounds(bcx, llargbundle, [0u, n]); - llargvals += [llretptr]; + let llretptr = load_inbounds(bcx, llargbundle, [0u, n]/~); + llargvals += [llretptr]/~; let llenvptr = C_null(T_opaque_box_ptr(bcx.ccx())); - llargvals += [llenvptr]; + llargvals += [llenvptr]/~; while i < n { - let llargval = load_inbounds(bcx, llargbundle, [0u, i]); - llargvals += [llargval]; + let llargval = load_inbounds(bcx, llargbundle, [0u, i]/~); + llargvals += [llargval]/~; i += 1u; } ret llargvals; @@ -970,7 +970,7 @@ fn trans_crust_fn(ccx: @crate_ctxt, path: ast_map::path, decl: ast::fn_decl, } let shim_name = link::mangle_internal_name_by_path( - ccx, path + [ast_map::path_name(@"__rust_stack_shim")]); + ccx, path + 
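The "crust" items in this file are Rust functions exposed to C callers; at this point in the compiler's history that takes a generated __rust_abi copy of the function plus a __rust_stack_shim to bridge calling conventions and stacks. Today the same intent is a one-attribute affair; a minimal sketch follows, with a made-up function name.

```rust
// Minimal modern counterpart of a "crust" (C-callable Rust) function. The
// 2012 compiler reached the same effect with generated __rust_abi and
// __rust_stack_shim wrappers; here the ABI string does the bridging.
#[no_mangle]
pub extern "C" fn rust_add(a: i32, b: i32) -> i32 {
    a + b
}

fn main() {
    // Also callable from Rust; a C caller would declare:
    //   int32_t rust_add(int32_t a, int32_t b);
    println!("{}", rust_add(2, 40));
}
```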
[ast_map::path_name(@"__rust_stack_shim")]/~); ret build_shim_fn_(ccx, shim_name, llrustfn, tys, lib::llvm::CCallConv, build_args, build_ret); @@ -1007,18 +1007,20 @@ fn trans_crust_fn(ccx: @crate_ctxt, path: ast_map::path, decl: ast::fn_decl, let mut argval = get_param(llwrapfn, i + j); if option::is_some(attrs[i]) { argval = Load(bcx, argval); - store_inbounds(bcx, argval, llargbundle, [0u, i]); + store_inbounds(bcx, argval, llargbundle, + [0u, i]/~); } else if atys[i].cast { - let argptr = GEPi(bcx, llargbundle, [0u, i]); + let argptr = GEPi(bcx, llargbundle, [0u, i]/~); let argptr = BitCast(bcx, argptr, T_ptr(atys[i].ty)); Store(bcx, argval, argptr); } else { - store_inbounds(bcx, argval, llargbundle, [0u, i]); + store_inbounds(bcx, argval, llargbundle, + [0u, i]/~); } i += 1u; } - store_inbounds(bcx, llretptr, llargbundle, [0u, n]); + store_inbounds(bcx, llretptr, llargbundle, [0u, n]/~); } _ { let llretptr = alloca(bcx, tys.ret_ty); @@ -1026,9 +1028,9 @@ fn trans_crust_fn(ccx: @crate_ctxt, path: ast_map::path, decl: ast::fn_decl, for uint::range(0u, n) {|i| let llargval = get_param(llwrapfn, i); store_inbounds(bcx, llargval, llargbundle, - [0u, i]); + [0u, i]/~); }; - store_inbounds(bcx, llretptr, llargbundle, [0u, n]); + store_inbounds(bcx, llretptr, llargbundle, [0u, n]/~); } } } @@ -1043,7 +1045,7 @@ fn trans_crust_fn(ccx: @crate_ctxt, path: ast_map::path, decl: ast::fn_decl, ret; } let n = vec::len(tys.arg_tys); - let llretval = load_inbounds(bcx, llargbundle, [0u, n]); + let llretval = load_inbounds(bcx, llargbundle, [0u, n]/~); let llretval = if x86_64.ret_ty.cast { let retptr = BitCast(bcx, llretval, T_ptr(x86_64.ret_ty.ty)); @@ -1055,7 +1057,7 @@ fn trans_crust_fn(ccx: @crate_ctxt, path: ast_map::path, decl: ast::fn_decl, } _ { let n = vec::len(tys.arg_tys); - let llretval = load_inbounds(bcx, llargbundle, [0u, n]); + let llretval = load_inbounds(bcx, llargbundle, [0u, n]/~); let llretval = Load(bcx, llretval); Ret(bcx, llretval); } diff --git a/src/rustc/middle/trans/reachable.rs b/src/rustc/middle/trans/reachable.rs index 86da0c72ecc..a745c347853 100644 --- a/src/rustc/middle/trans/reachable.rs +++ b/src/rustc/middle/trans/reachable.rs @@ -31,7 +31,7 @@ fn find_reachable(crate_mod: _mod, exp_map: resolve::exp_map, rmap } -fn traverse_exports(cx: ctx, vis: [@view_item]) -> bool { +fn traverse_exports(cx: ctx, vis: [@view_item]/~) -> bool { let mut found_export = false; for vec::each(vis) {|vi| alt vi.node { diff --git a/src/rustc/middle/trans/reflect.rs b/src/rustc/middle/trans/reflect.rs index 0ca89ac6a94..bad3d52f147 100644 --- a/src/rustc/middle/trans/reflect.rs +++ b/src/rustc/middle/trans/reflect.rs @@ -12,7 +12,7 @@ import util::ppaux::ty_to_str; enum reflector = { visitor_val: ValueRef, - visitor_methods: @[ty::method], + visitor_methods: @[ty::method]/~, mut bcx: block }; @@ -31,15 +31,15 @@ impl methods for reflector { do_spill_noroot(self.bcx, ss) } - fn c_size_and_align(t: ty::t) -> [ValueRef] { + fn c_size_and_align(t: ty::t) -> [ValueRef]/~ { let tr = type_of::type_of(self.bcx.ccx(), t); let s = shape::llsize_of_real(self.bcx.ccx(), tr); let a = shape::llalign_of_min(self.bcx.ccx(), tr); ret [self.c_uint(s), - self.c_uint(a)]; + self.c_uint(a)]/~; } - fn visit(ty_name: str, args: [ValueRef]) { + fn visit(ty_name: str, args: [ValueRef]/~) { let tcx = self.bcx.tcx(); let mth_idx = option::get(ty::method_idx(@("visit_" + ty_name), *self.visitor_methods)); @@ -69,33 +69,33 @@ impl methods for reflector { abi::tydesc_field_visit_glue); } - fn 
bracketed_t(bracket_name: str, t: ty::t, extra: [ValueRef]) { + fn bracketed_t(bracket_name: str, t: ty::t, extra: [ValueRef]/~) { self.visit("enter_" + bracket_name, extra); self.visit_tydesc(t); self.visit("leave_" + bracket_name, extra); } - fn bracketed_mt(bracket_name: str, mt: ty::mt, extra: [ValueRef]) { + fn bracketed_mt(bracket_name: str, mt: ty::mt, extra: [ValueRef]/~) { self.bracketed_t(bracket_name, mt.ty, - [self.c_uint(mt.mutbl as uint)] + extra); + [self.c_uint(mt.mutbl as uint)]/~ + extra); } fn vstore_name_and_extra(t: ty::t, vstore: ty::vstore, - f: fn(str,[ValueRef])) { + f: fn(str,[ValueRef]/~)) { alt vstore { ty::vstore_fixed(n) { - let extra = [self.c_uint(n)] + self.c_size_and_align(t); + let extra = [self.c_uint(n)]/~ + self.c_size_and_align(t); f("fixed", extra) } - ty::vstore_slice(_) { f("slice", []) } - ty::vstore_uniq { f("uniq", []);} - ty::vstore_box { f("box", []); } + ty::vstore_slice(_) { f("slice", []/~) } + ty::vstore_uniq { f("uniq", []/~);} + ty::vstore_box { f("box", []/~); } } } fn leaf(name: str) { - self.visit(name, []); + self.visit(name, []/~); } // Entrypoint @@ -125,7 +125,7 @@ impl methods for reflector { ty::ty_float(ast::ty_f64) { self.leaf("f64") } ty::ty_str { self.leaf("str") } - ty::ty_vec(mt) { self.bracketed_mt("vec", mt, []) } + ty::ty_vec(mt) { self.bracketed_mt("vec", mt, []/~) } ty::ty_estr(vst) { self.vstore_name_and_extra(t, vst) {|name, extra| self.visit("estr_" + name, extra) @@ -136,29 +136,29 @@ impl methods for reflector { self.bracketed_mt("evec_" + name, mt, extra) } } - ty::ty_box(mt) { self.bracketed_mt("box", mt, []) } - ty::ty_uniq(mt) { self.bracketed_mt("uniq", mt, []) } - ty::ty_ptr(mt) { self.bracketed_mt("ptr", mt, []) } - ty::ty_rptr(_, mt) { self.bracketed_mt("rptr", mt, []) } + ty::ty_box(mt) { self.bracketed_mt("box", mt, []/~) } + ty::ty_uniq(mt) { self.bracketed_mt("uniq", mt, []/~) } + ty::ty_ptr(mt) { self.bracketed_mt("ptr", mt, []/~) } + ty::ty_rptr(_, mt) { self.bracketed_mt("rptr", mt, []/~) } ty::ty_rec(fields) { - let extra = ([self.c_uint(vec::len(fields))] + let extra = ([self.c_uint(vec::len(fields))]/~ + self.c_size_and_align(t)); self.visit("enter_rec", extra); for fields.eachi {|i, field| self.bracketed_mt("rec_field", field.mt, [self.c_uint(i), - self.c_slice(*field.ident)]); + self.c_slice(*field.ident)]/~); } self.visit("leave_rec", extra); } ty::ty_tup(tys) { - let extra = ([self.c_uint(vec::len(tys))] + let extra = ([self.c_uint(vec::len(tys))]/~ + self.c_size_and_align(t)); self.visit("enter_tup", extra); for tys.eachi {|i, t| - self.bracketed_t("tup_field", t, [self.c_uint(i)]); + self.bracketed_t("tup_field", t, [self.c_uint(i)]/~); } self.visit("leave_tup", extra); } @@ -186,7 +186,7 @@ impl methods for reflector { let extra = [self.c_uint(pureval), self.c_uint(protoval), self.c_uint(vec::len(fty.inputs)), - self.c_uint(retval)]; + self.c_uint(retval)]/~; self.visit("enter_fn", extra); for fty.inputs.eachi {|i, arg| let modeval = alt arg.mode { @@ -203,10 +203,10 @@ impl methods for reflector { }; self.bracketed_t("fn_input", arg.ty, [self.c_uint(i), - self.c_uint(modeval)]); + self.c_uint(modeval)]/~); } self.bracketed_t("fn_output", fty.output, - [self.c_uint(retval)]); + [self.c_uint(retval)]/~); self.visit("leave_fn", extra); } @@ -214,14 +214,14 @@ impl methods for reflector { let bcx = self.bcx; let tcx = bcx.ccx().tcx; let fields = ty::class_items_as_fields(tcx, did, substs); - let extra = ([self.c_uint(vec::len(fields))] + let extra = ([self.c_uint(vec::len(fields))]/~ + 
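reflect.rs drives the compiler's reflection intrinsic as a visitor: for each type constructor it calls an enter_*/leave_* pair on the visitor object, passing size, alignment, and per-field extras, and recurses through the element types via their tydescs. A small sketch of the same enter/visit/leave bracketing over a toy type description is below; the TyDesc and Visitor names are made up, while the real code walks ty::t and dispatches through visitor_methods.

```rust
// Sketch of the bracketing that reflector::bracketed_t performs: the visitor
// sees enter_rec, one call per field, then leave_rec. Toy type description;
// the real code walks ty::t and dispatches on the visitor's method table.
enum TyDesc {
    Uint,
    Rec(Vec<(String, TyDesc)>),
}

trait Visitor {
    fn visit(&mut self, what: &str);
}

fn walk(t: &TyDesc, v: &mut dyn Visitor) {
    match t {
        TyDesc::Uint => v.visit("uint"),
        TyDesc::Rec(fields) => {
            v.visit("enter_rec");
            for (i, (name, field_ty)) in fields.iter().enumerate() {
                v.visit(&format!("rec_field #{} ({})", i, name));
                walk(field_ty, v);
            }
            v.visit("leave_rec");
        }
    }
}

struct Printer;

impl Visitor for Printer {
    fn visit(&mut self, what: &str) { println!("visit_{}", what); }
}

fn main() {
    let t = TyDesc::Rec(vec![("x".into(), TyDesc::Uint), ("y".into(), TyDesc::Uint)]);
    walk(&t, &mut Printer);
}
```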
self.c_size_and_align(t)); self.visit("enter_class", extra); for fields.eachi {|i, field| self.bracketed_mt("class_field", field.mt, [self.c_uint(i), - self.c_slice(*field.ident)]); + self.c_slice(*field.ident)]/~); } self.visit("leave_class", extra); } @@ -234,7 +234,7 @@ impl methods for reflector { let bcx = self.bcx; let tcx = bcx.ccx().tcx; let variants = ty::substd_enum_variants(tcx, did, substs); - let extra = ([self.c_uint(vec::len(variants))] + let extra = ([self.c_uint(vec::len(variants))]/~ + self.c_size_and_align(t)); self.visit("enter_enum", extra); @@ -242,11 +242,11 @@ impl methods for reflector { let extra = [self.c_uint(i), self.c_int(v.disr_val), self.c_uint(vec::len(v.args)), - self.c_slice(*v.name)]; + self.c_slice(*v.name)]/~; self.visit("enter_enum_variant", extra); for v.args.eachi {|j, a| self.bracketed_t("enum_variant_field", a, - [self.c_uint(j)]); + [self.c_uint(j)]/~); } self.visit("leave_enum_variant", extra); } @@ -257,20 +257,20 @@ impl methods for reflector { ty::ty_iface(_, _) { self.leaf("iface") } ty::ty_var(_) { self.leaf("var") } ty::ty_var_integral(_) { self.leaf("var_integral") } - ty::ty_param(n, _) { self.visit("param", [self.c_uint(n)]) } + ty::ty_param(n, _) { self.visit("param", [self.c_uint(n)]/~) } ty::ty_self { self.leaf("self") } ty::ty_type { self.leaf("type") } ty::ty_opaque_box { self.leaf("opaque_box") } - ty::ty_constr(t, _) { self.bracketed_t("constr", t, []) } + ty::ty_constr(t, _) { self.bracketed_t("constr", t, []/~) } ty::ty_opaque_closure_ptr(ck) { let ckval = alt ck { ty::ck_block { 0u } ty::ck_box { 1u } ty::ck_uniq { 2u } }; - self.visit("closure_ptr", [self.c_uint(ckval)]) + self.visit("closure_ptr", [self.c_uint(ckval)]/~) } - ty::ty_unboxed_vec(mt) { self.bracketed_mt("vec", mt, []) } + ty::ty_unboxed_vec(mt) { self.bracketed_mt("vec", mt, []/~) } } } } diff --git a/src/rustc/middle/trans/shape.rs b/src/rustc/middle/trans/shape.rs index 4ef40f4f924..9b8f99e4c36 100644 --- a/src/rustc/middle/trans/shape.rs +++ b/src/rustc/middle/trans/shape.rs @@ -22,11 +22,11 @@ import std::map::hashmap; import ty_ctxt = middle::ty::ctxt; type nominal_id = @{did: ast::def_id, parent_id: option, - tps: [ty::t]}; + tps: [ty::t]/~}; fn mk_nominal_id(tcx: ty::ctxt, did: ast::def_id, parent_id: option, - tps: [ty::t]) -> nominal_id { + tps: [ty::t]/~) -> nominal_id { let tps_norm = tps.map { |t| ty::normalize_ty(tcx, t) }; @{did: did, parent_id: parent_id, tps: tps_norm} } @@ -197,44 +197,46 @@ fn mk_ctxt(llmod: ModuleRef) -> ctxt { llshapetables: llshapetables}; } -fn add_bool(&dest: [u8], val: bool) { dest += [if val { 1u8 } else { 0u8 }]; } - -fn add_u16(&dest: [u8], val: u16) { - dest += [(val & 0xffu16) as u8, (val >> 8u16) as u8]; +fn add_bool(&dest: [u8]/~, val: bool) { + dest += [if val { 1u8 } else { 0u8 }]/~; } -fn add_substr(&dest: [u8], src: [u8]) { +fn add_u16(&dest: [u8]/~, val: u16) { + dest += [(val & 0xffu16) as u8, (val >> 8u16) as u8]/~; +} + +fn add_substr(&dest: [u8]/~, src: [u8]/~) { add_u16(dest, vec::len(src) as u16); dest += src; } -fn shape_of(ccx: @crate_ctxt, t: ty::t) -> [u8] { +fn shape_of(ccx: @crate_ctxt, t: ty::t) -> [u8]/~ { alt ty::get(t).struct { ty::ty_nil | ty::ty_bool | ty::ty_uint(ast::ty_u8) | - ty::ty_bot { [shape_u8] } - ty::ty_int(ast::ty_i) { [s_int(ccx.tcx)] } - ty::ty_float(ast::ty_f) { [s_float(ccx.tcx)] } - ty::ty_uint(ast::ty_u) | ty::ty_ptr(_) { [s_uint(ccx.tcx)] } - ty::ty_type { [s_tydesc(ccx.tcx)] } - ty::ty_int(ast::ty_i8) { [shape_i8] } - ty::ty_uint(ast::ty_u16) { [shape_u16] } - 
ty::ty_int(ast::ty_i16) { [shape_i16] } - ty::ty_uint(ast::ty_u32) { [shape_u32] } - ty::ty_int(ast::ty_i32) | ty::ty_int(ast::ty_char) { [shape_i32] } - ty::ty_uint(ast::ty_u64) { [shape_u64] } - ty::ty_int(ast::ty_i64) { [shape_i64] } - ty::ty_float(ast::ty_f32) { [shape_f32] } - ty::ty_float(ast::ty_f64) { [shape_f64] } + ty::ty_bot { [shape_u8]/~ } + ty::ty_int(ast::ty_i) { [s_int(ccx.tcx)]/~ } + ty::ty_float(ast::ty_f) { [s_float(ccx.tcx)]/~ } + ty::ty_uint(ast::ty_u) | ty::ty_ptr(_) { [s_uint(ccx.tcx)]/~ } + ty::ty_type { [s_tydesc(ccx.tcx)]/~ } + ty::ty_int(ast::ty_i8) { [shape_i8]/~ } + ty::ty_uint(ast::ty_u16) { [shape_u16]/~ } + ty::ty_int(ast::ty_i16) { [shape_i16]/~ } + ty::ty_uint(ast::ty_u32) { [shape_u32]/~ } + ty::ty_int(ast::ty_i32) | ty::ty_int(ast::ty_char) { [shape_i32]/~ } + ty::ty_uint(ast::ty_u64) { [shape_u64]/~ } + ty::ty_int(ast::ty_i64) { [shape_i64]/~ } + ty::ty_float(ast::ty_f32) { [shape_f32]/~ } + ty::ty_float(ast::ty_f64) { [shape_f64]/~ } ty::ty_estr(ty::vstore_uniq) | ty::ty_str { shape_of(ccx, tvec::expand_boxed_vec_ty(ccx.tcx, t)) } ty::ty_enum(did, substs) { alt enum_kind(ccx, did) { - tk_unit { [s_variant_enum_t(ccx.tcx)] } - tk_enum { [s_variant_enum_t(ccx.tcx)] } + tk_unit { [s_variant_enum_t(ccx.tcx)]/~ } + tk_enum { [s_variant_enum_t(ccx.tcx)]/~ } tk_newtype | tk_complex { - let mut s = [shape_enum], id; + let mut s = [shape_enum]/~, id; let nom_id = mk_nominal_id(ccx.tcx, did, none, substs.tps); alt ccx.shape_cx.tag_id_to_index.find(nom_id) { none { @@ -257,14 +259,14 @@ fn shape_of(ccx: @crate_ctxt, t: ty::t) -> [u8] { } ty::ty_estr(ty::vstore_box) | ty::ty_evec(_, ty::vstore_box) | - ty::ty_box(_) | ty::ty_opaque_box { [shape_box] } + ty::ty_box(_) | ty::ty_opaque_box { [shape_box]/~ } ty::ty_uniq(mt) { - let mut s = [shape_uniq]; + let mut s = [shape_uniq]/~; add_substr(s, shape_of(ccx, mt.ty)); s } ty::ty_unboxed_vec(mt) { - let mut s = [shape_unboxed_vec]; + let mut s = [shape_unboxed_vec]/~; add_bool(s, ty::type_is_pod(ccx.tcx, mt.ty)); add_substr(s, shape_of(ccx, mt.ty)); s @@ -275,7 +277,7 @@ fn shape_of(ccx: @crate_ctxt, t: ty::t) -> [u8] { } ty::ty_estr(ty::vstore_fixed(n)) { - let mut s = [shape_fixedvec]; + let mut s = [shape_fixedvec]/~; let u8_t = ty::mk_mach_uint(ccx.tcx, ast::ty_u8); assert (n + 1u) <= 0xffffu; add_u16(s, (n + 1u) as u16); @@ -285,7 +287,7 @@ fn shape_of(ccx: @crate_ctxt, t: ty::t) -> [u8] { } ty::ty_evec(mt, ty::vstore_fixed(n)) { - let mut s = [shape_fixedvec]; + let mut s = [shape_fixedvec]/~; assert n <= 0xffffu; add_u16(s, n as u16); add_bool(s, ty::type_is_pod(ccx.tcx, mt.ty)); @@ -294,7 +296,7 @@ fn shape_of(ccx: @crate_ctxt, t: ty::t) -> [u8] { } ty::ty_estr(ty::vstore_slice(r)) { - let mut s = [shape_slice]; + let mut s = [shape_slice]/~; let u8_t = ty::mk_mach_uint(ccx.tcx, ast::ty_u8); add_bool(s, true); // is_pod add_bool(s, true); // is_str @@ -303,7 +305,7 @@ fn shape_of(ccx: @crate_ctxt, t: ty::t) -> [u8] { } ty::ty_evec(mt, ty::vstore_slice(r)) { - let mut s = [shape_slice]; + let mut s = [shape_slice]/~; add_bool(s, ty::type_is_pod(ccx.tcx, mt.ty)); add_bool(s, false); // is_str add_substr(s, shape_of(ccx, mt.ty)); @@ -311,7 +313,7 @@ fn shape_of(ccx: @crate_ctxt, t: ty::t) -> [u8] { } ty::ty_rec(fields) { - let mut s = [shape_struct], sub = []; + let mut s = [shape_struct]/~, sub = []/~; for vec::each(fields) {|f| sub += shape_of(ccx, f.mt.ty); } @@ -319,22 +321,22 @@ fn shape_of(ccx: @crate_ctxt, t: ty::t) -> [u8] { s } ty::ty_tup(elts) { - let mut s = [shape_struct], sub = []; + let mut s = 
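shape.rs serializes each type's layout into a byte string: one-byte tags such as shape_struct and shape_fixedvec, 16-bit little-endian integers written by add_u16, and length-prefixed nested shapes written by add_substr. The three helpers translate almost verbatim into modern Rust (tag constants elided):

```rust
// Near-verbatim modern transcription of shape.rs's add_bool/add_u16/add_substr:
// booleans as one byte, u16 low byte first, substrings prefixed by their length.
fn add_bool(dest: &mut Vec<u8>, val: bool) {
    dest.push(if val { 1 } else { 0 });
}

fn add_u16(dest: &mut Vec<u8>, val: u16) {
    dest.push((val & 0xff) as u8); // low byte first
    dest.push((val >> 8) as u8);   // then the high byte
}

fn add_substr(dest: &mut Vec<u8>, src: &[u8]) {
    add_u16(dest, src.len() as u16); // length prefix
    dest.extend_from_slice(src);     // then the nested shape bytes
}

fn main() {
    let mut shape = Vec::new();
    add_bool(&mut shape, true);
    add_substr(&mut shape, &[0xAA, 0xBB]);
    assert_eq!(shape, [1, 2, 0, 0xAA, 0xBB]);
    println!("{:?}", shape);
}
```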
[shape_struct]/~, sub = []/~; for vec::each(elts) {|elt| sub += shape_of(ccx, elt); } add_substr(s, sub); s } - ty::ty_iface(_, _) { [shape_box_fn] } + ty::ty_iface(_, _) { [shape_box_fn]/~ } ty::ty_class(did, substs) { // same as records, unless there's a dtor let tps = substs.tps; let m_dtor_did = ty::ty_dtor(ccx.tcx, did); let mut s = if option::is_some(m_dtor_did) { - [shape_res] + [shape_res]/~ } - else { [shape_struct] }, sub = []; + else { [shape_struct]/~ }, sub = []/~; option::iter(m_dtor_did) {|dtor_did| let ri = @{did: dtor_did, parent_id: some(did), tps: tps}; let id = interner::intern(ccx.shape_cx.resources, ri); @@ -351,19 +353,19 @@ fn shape_of(ccx: @crate_ctxt, t: ty::t) -> [u8] { s } ty::ty_rptr(_, mt) { - let mut s = [shape_rptr]; + let mut s = [shape_rptr]/~; add_substr(s, shape_of(ccx, mt.ty)); s } ty::ty_param(*) { ccx.tcx.sess.bug("non-monomorphized type parameter"); } - ty::ty_fn({proto: ast::proto_box, _}) { [shape_box_fn] } - ty::ty_fn({proto: ast::proto_uniq, _}) { [shape_uniq_fn] } + ty::ty_fn({proto: ast::proto_box, _}) { [shape_box_fn]/~ } + ty::ty_fn({proto: ast::proto_uniq, _}) { [shape_uniq_fn]/~ } ty::ty_fn({proto: ast::proto_block, _}) | - ty::ty_fn({proto: ast::proto_any, _}) { [shape_stack_fn] } - ty::ty_fn({proto: ast::proto_bare, _}) { [shape_bare_fn] } - ty::ty_opaque_closure_ptr(_) { [shape_opaque_closure_ptr] } + ty::ty_fn({proto: ast::proto_any, _}) { [shape_stack_fn]/~ } + ty::ty_fn({proto: ast::proto_bare, _}) { [shape_bare_fn]/~ } + ty::ty_opaque_closure_ptr(_) { [shape_opaque_closure_ptr]/~ } ty::ty_constr(inner_t, _) { shape_of(ccx, inner_t) } ty::ty_var(_) | ty::ty_var_integral(_) | ty::ty_self { ccx.sess.bug("shape_of: unexpected type struct found"); @@ -371,8 +373,8 @@ fn shape_of(ccx: @crate_ctxt, t: ty::t) -> [u8] { } } -fn shape_of_variant(ccx: @crate_ctxt, v: ty::variant_info) -> [u8] { - let mut s = []; +fn shape_of_variant(ccx: @crate_ctxt, v: ty::variant_info) -> [u8]/~ { + let mut s = []/~; for vec::each(v.args) {|t| s += shape_of(ccx, t); } ret s; } @@ -381,23 +383,23 @@ fn gen_enum_shapes(ccx: @crate_ctxt) -> ValueRef { // Loop over all the enum variants and write their shapes into a // data buffer. As we do this, it's possible for us to discover // new enums, so we must do this first. - let mut data = []; - let mut offsets = []; + let mut data = []/~; + let mut offsets = []/~; let mut i = 0u; - let mut enum_variants = []; + let mut enum_variants = []/~; while i < ccx.shape_cx.tag_order.len() { let {did, substs} = ccx.shape_cx.tag_order[i]; let variants = @ty::substd_enum_variants(ccx.tcx, did, substs); vec::iter(*variants) {|v| - offsets += [vec::len(data) as u16]; + offsets += [vec::len(data) as u16]/~; let variant_shape = shape_of_variant(ccx, v); add_substr(data, variant_shape); - let zname = str::bytes(*v.name) + [0u8]; + let zname = str::bytes(*v.name) + [0u8]/~; add_substr(data, zname); } - enum_variants += [variants]; + enum_variants += [variants]/~; i += 1u; } @@ -405,8 +407,8 @@ fn gen_enum_shapes(ccx: @crate_ctxt) -> ValueRef { // info records for each enum) and the info space (which contains offsets // to each variant shape). As we do so, build up the header. - let mut header = []; - let mut inf = []; + let mut header = []/~; + let mut inf = []/~; let header_sz = 2u16 * ccx.shape_cx.next_tag_id; let data_sz = vec::len(data) as u16; @@ -421,7 +423,7 @@ fn gen_enum_shapes(ccx: @crate_ctxt) -> ValueRef { // variant. 
Also construct the largest-variant table for each enum, which // contains the variants that the size-of operation needs to look at. - let mut lv_table = []; + let mut lv_table = []/~; let mut i = 0u; for enum_variants.each { |variants| add_u16(inf, vec::len(*variants) as u16); @@ -445,7 +447,7 @@ fn gen_enum_shapes(ccx: @crate_ctxt) -> ValueRef { // Write in the static size and alignment of the enum. add_u16(inf, size_align.size); - inf += [size_align.align]; + inf += [size_align.align]/~; // Now write in the offset of each variant. for vec::each(*variants) {|_v| @@ -467,14 +469,14 @@ fn gen_enum_shapes(ccx: @crate_ctxt) -> ValueRef { /* tjc: Not annotating FIXMEs in this module because of #1498 */ fn largest_variants(ccx: @crate_ctxt, - variants: @[ty::variant_info]) -> [uint] { + variants: @[ty::variant_info]/~) -> [uint]/~ { // Compute the minimum and maximum size and alignment for each // variant. // // NB: We could do better here; e.g. we know that any // variant that contains (T,T) must be as least as large as // any variant that contains just T. - let mut ranges = []; + let mut ranges = []/~; for vec::each(*variants) {|variant| let mut bounded = true; let mut min_size = 0u, min_align = 0u; @@ -493,12 +495,12 @@ fn gen_enum_shapes(ccx: @crate_ctxt) -> ValueRef { ranges += [{size: {min: min_size, bounded: bounded}, - align: {min: min_align, bounded: bounded}}]; + align: {min: min_align, bounded: bounded}}]/~; } // Initialize the candidate set to contain all variants. - let mut candidates = [mut]; - for vec::each(*variants) {|_v| candidates += [mut true]; } + let mut candidates = [mut]/~; + for vec::each(*variants) {|_v| candidates += [mut true]/~; } // Do a pairwise comparison among all variants still in the // candidate set. Throw out any variant that we know has size @@ -531,24 +533,25 @@ fn gen_enum_shapes(ccx: @crate_ctxt) -> ValueRef { } // Return the resulting set. - let mut result = []; + let mut result = []/~; let mut i = 0u; while i < vec::len(candidates) { - if candidates[i] { result += [i]; } + if candidates[i] { vec::push(result, i); } i += 1u; } ret result; } - fn compute_static_enum_size(ccx: @crate_ctxt, largest_variants: [uint], - variants: @[ty::variant_info]) -> size_align { + fn compute_static_enum_size(ccx: @crate_ctxt, largest_variants: [uint]/~, + variants: @[ty::variant_info]/~) + -> size_align { let mut max_size = 0u16; let mut max_align = 1u8; for vec::each(largest_variants) {|vid| // We increment a "virtual data pointer" to compute the size. 
- let mut lltys = []; + let mut lltys = []/~; for vec::each(variants[vid].args) {|typ| - lltys += [type_of::type_of(ccx, typ)]; + lltys += [type_of::type_of(ccx, typ)]/~; } let llty = trans::common::T_struct(lltys); @@ -574,13 +577,13 @@ fn gen_enum_shapes(ccx: @crate_ctxt) -> ValueRef { } fn gen_resource_shapes(ccx: @crate_ctxt) -> ValueRef { - let mut dtors = []; + let mut dtors = []/~; let len = interner::len(ccx.shape_cx.resources); for uint::range(0u, len) {|i| let ri = interner::get(ccx.shape_cx.resources, i); for ri.tps.each() {|s| assert !ty::type_has_params(s); } option::iter(ri.parent_id) {|id| - dtors += [trans::base::get_res_dtor(ccx, ri.did, id, ri.tps)]; + dtors += [trans::base::get_res_dtor(ccx, ri.did, id, ri.tps)]/~; } } ret mk_global(ccx, "resource_shapes", C_struct(dtors), true); @@ -591,11 +594,11 @@ fn gen_shape_tables(ccx: @crate_ctxt) { let llresourcestable = gen_resource_shapes(ccx); trans::common::set_struct_body(ccx.shape_cx.llshapetablesty, [val_ty(lltagstable), - val_ty(llresourcestable)]); + val_ty(llresourcestable)]/~); let lltables = C_named_struct(ccx.shape_cx.llshapetablesty, - [lltagstable, llresourcestable]); + [lltagstable, llresourcestable]/~); lib::llvm::llvm::LLVMSetInitializer(ccx.shape_cx.llshapetables, lltables); lib::llvm::llvm::LLVMSetGlobalConstant(ccx.shape_cx.llshapetables, True); lib::llvm::SetLinkage(ccx.shape_cx.llshapetables, @@ -724,10 +727,10 @@ fn simplify_type(tcx: ty::ctxt, typ: ty::t) -> ty::t { ty::ty_evec(_, ty::vstore_uniq) | ty::ty_evec(_, ty::vstore_box) | ty::ty_estr(ty::vstore_uniq) | ty::ty_estr(ty::vstore_box) | ty::ty_ptr(_) | ty::ty_rptr(_,_) { nilptr(tcx) } - ty::ty_fn(_) { ty::mk_tup(tcx, [nilptr(tcx), nilptr(tcx)]) } + ty::ty_fn(_) { ty::mk_tup(tcx, [nilptr(tcx), nilptr(tcx)]/~) } ty::ty_evec(_, ty::vstore_slice(_)) | ty::ty_estr(ty::vstore_slice(_)) { - ty::mk_tup(tcx, [nilptr(tcx), ty::mk_int(tcx)]) + ty::mk_tup(tcx, [nilptr(tcx), ty::mk_int(tcx)]/~) } _ { typ } } @@ -736,7 +739,7 @@ fn simplify_type(tcx: ty::ctxt, typ: ty::t) -> ty::t { } // Given a tag type `ty`, returns the offset of the payload. 
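gen_enum_shapes needs the statically largest variant(s) of each enum. largest_variants records a minimum size and alignment per variant together with a flag saying whether that bound is exact, throws out any variant that provably cannot be the largest, and compute_static_enum_size then takes the maximum size and alignment over the survivors. A simplified numeric sketch of the pruning step is below, with the ranges collapsed to plain (min, max) pairs and unbounded sizes, such as those involving type parameters, modeled as u32::MAX.

```rust
// Simplified sketch of the candidate pruning in largest_variants: a variant
// stays a candidate only if its maximum possible size can still reach the
// best guaranteed minimum among all variants. (min, max) pairs stand in for
// the patch's {size: {min, bounded}, ..} records.
fn largest_variant_candidates(ranges: &[(u32, u32)]) -> Vec<usize> {
    let best_min = ranges.iter().map(|&(min, _)| min).max().unwrap_or(0);
    ranges
        .iter()
        .enumerate()
        .filter(|&(_, &(_, max))| max >= best_min)
        .map(|(i, _)| i)
        .collect()
}

fn main() {
    // Variant 0 is exactly 4 bytes, variant 1 exactly 8, variant 2 unbounded.
    let ranges = [(4, 4), (8, 8), (0, u32::MAX)];
    println!("{:?}", largest_variant_candidates(&ranges)); // [1, 2]
}
```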
-//fn tag_payload_offs(bcx: block, tag_id: ast::def_id, tps: [ty::t]) +//fn tag_payload_offs(bcx: block, tag_id: ast::def_id, tps: [ty::t]/~) // -> ValueRef { // alt tag_kind(tag_id) { // tk_unit | tk_enum | tk_newtype { C_int(bcx.ccx(), 0) } diff --git a/src/rustc/middle/trans/tvec.rs b/src/rustc/middle/trans/tvec.rs index d05629b990f..7bc0ca50608 100644 --- a/src/rustc/middle/trans/tvec.rs +++ b/src/rustc/middle/trans/tvec.rs @@ -36,30 +36,30 @@ fn expand_boxed_vec_ty(tcx: ty::ctxt, t: ty::t) -> ty::t { fn get_fill(bcx: block, vptr: ValueRef) -> ValueRef { let _icx = bcx.insn_ctxt("tvec::get_fill"); - Load(bcx, GEPi(bcx, vptr, [0u, abi::vec_elt_fill])) + Load(bcx, GEPi(bcx, vptr, [0u, abi::vec_elt_fill]/~)) } fn set_fill(bcx: block, vptr: ValueRef, fill: ValueRef) { - Store(bcx, fill, GEPi(bcx, vptr, [0u, abi::vec_elt_fill])); + Store(bcx, fill, GEPi(bcx, vptr, [0u, abi::vec_elt_fill]/~)); } fn get_alloc(bcx: block, vptr: ValueRef) -> ValueRef { - Load(bcx, GEPi(bcx, vptr, [0u, abi::vec_elt_alloc])) + Load(bcx, GEPi(bcx, vptr, [0u, abi::vec_elt_alloc]/~)) } fn get_bodyptr(bcx: block, vptr: ValueRef) -> ValueRef { - non_gc_box_cast(bcx, GEPi(bcx, vptr, [0u, abi::box_field_body])) + non_gc_box_cast(bcx, GEPi(bcx, vptr, [0u, abi::box_field_body]/~)) } fn get_dataptr(bcx: block, vptr: ValueRef) -> ValueRef { let _icx = bcx.insn_ctxt("tvec::get_dataptr"); - GEPi(bcx, vptr, [0u, abi::vec_elt_elems, 0u]) + GEPi(bcx, vptr, [0u, abi::vec_elt_elems, 0u]/~) } fn pointer_add(bcx: block, ptr: ValueRef, bytes: ValueRef) -> ValueRef { let _icx = bcx.insn_ctxt("tvec::pointer_add"); let old_ty = val_ty(ptr); let bptr = PointerCast(bcx, ptr, T_ptr(T_i8())); - ret PointerCast(bcx, InBoundsGEP(bcx, bptr, [bytes]), old_ty); + ret PointerCast(bcx, InBoundsGEP(bcx, bptr, [bytes]/~), old_ty); } fn alloc_raw(bcx: block, unit_ty: ty::t, @@ -71,8 +71,8 @@ fn alloc_raw(bcx: block, unit_ty: ty::t, let vecsize = Add(bcx, alloc, llsize_of(ccx, ccx.opaque_vec_type)); let {box, body} = base::malloc_general_dyn(bcx, vecbodyty, heap, vecsize); - Store(bcx, fill, GEPi(bcx, body, [0u, abi::vec_elt_fill])); - Store(bcx, alloc, GEPi(bcx, body, [0u, abi::vec_elt_alloc])); + Store(bcx, fill, GEPi(bcx, body, [0u, abi::vec_elt_fill]/~)); + Store(bcx, alloc, GEPi(bcx, body, [0u, abi::vec_elt_alloc]/~)); ret {bcx: bcx, val: box}; } fn alloc_uniq_raw(bcx: block, unit_ty: ty::t, @@ -119,7 +119,7 @@ fn make_drop_glue_unboxed(bcx: block, vptr: ValueRef, vec_ty: ty::t) -> } else { bcx } } -fn trans_evec(bcx: block, args: [@ast::expr], +fn trans_evec(bcx: block, args: [@ast::expr]/~, vst: ast::vstore, id: ast::node_id, dest: dest) -> block { let _icx = bcx.insn_ctxt("tvec::trans_evec"); let ccx = bcx.ccx(); @@ -158,9 +158,9 @@ fn trans_evec(bcx: block, args: [@ast::expr], let len = Mul(bcx, n, unit_sz); let p = base::alloca(bcx, T_struct([T_ptr(llunitty), - ccx.int_type])); - Store(bcx, vp, GEPi(bcx, p, [0u, abi::slice_elt_base])); - Store(bcx, len, GEPi(bcx, p, [0u, abi::slice_elt_len])); + ccx.int_type]/~)); + Store(bcx, vp, GEPi(bcx, p, [0u, abi::slice_elt_base]/~)); + Store(bcx, len, GEPi(bcx, p, [0u, abi::slice_elt_len]/~)); {bcx: bcx, val: p, dataptr: vp} } @@ -182,12 +182,12 @@ fn trans_evec(bcx: block, args: [@ast::expr], // Store the individual elements. 
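The tvec.rs hunks address a heap vector's body fields by index: fill (bytes of the element area currently in use), alloc (bytes allocated), then the elements themselves; boxed @-vectors add a box header in front, reached through box_field_body. A compact sketch of that body as an ordinary struct is below; the field types are illustrative, since the compiler addresses this layout as raw memory through GEPs.

```rust
// Sketch of the vector body whose fields get_fill/get_alloc/get_dataptr
// reach by index in the patch (abi::vec_elt_fill, vec_elt_alloc,
// vec_elt_elems). Illustrative only; the real thing is untyped memory.
struct VecBody {
    fill: usize,   // bytes of the element area currently in use
    alloc: usize,  // bytes of the element area allocated
    data: Vec<u8>, // stand-in for the inline element storage
}

fn get_fill(v: &VecBody) -> usize { v.fill }

fn set_fill(v: &mut VecBody, fill: usize) { v.fill = fill; }

fn main() {
    let mut v = VecBody { fill: 0, alloc: 16, data: vec![0; 16] };
    set_fill(&mut v, 8); // e.g. after writing 8 bytes of elements
    println!("fill={} alloc={}", get_fill(&v), v.alloc);
}
```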
- let mut i = 0u, temp_cleanups = [val]; + let mut i = 0u, temp_cleanups = [val]/~; #debug("trans_evec: v: %s, dataptr: %s", val_str(ccx.tn, val), val_str(ccx.tn, dataptr)); for vec::each(args) {|e| - let lleltptr = InBoundsGEP(bcx, dataptr, [C_uint(ccx, i)]); + let lleltptr = InBoundsGEP(bcx, dataptr, [C_uint(ccx, i)]/~); bcx = base::trans_expr_save_in(bcx, e, lleltptr); add_clean_temp_mem(bcx, lleltptr, unit_ty); vec::push(temp_cleanups, lleltptr); @@ -242,14 +242,14 @@ fn get_base_and_len(cx: block, v: ValueRef, e_ty: ty::t) alt vstore { ty::vstore_fixed(n) { - let base = GEPi(cx, v, [0u, 0u]); + let base = GEPi(cx, v, [0u, 0u]/~); let n = if ty::type_is_str(e_ty) { n + 1u } else { n }; let len = Mul(cx, C_uint(ccx, n), unit_sz); (base, len) } ty::vstore_slice(_) { - let base = Load(cx, GEPi(cx, v, [0u, abi::slice_elt_base])); - let len = Load(cx, GEPi(cx, v, [0u, abi::slice_elt_len])); + let base = Load(cx, GEPi(cx, v, [0u, abi::slice_elt_base]/~)); + let len = Load(cx, GEPi(cx, v, [0u, abi::slice_elt_len]/~)); (base, len) } ty::vstore_uniq | ty::vstore_box { @@ -268,7 +268,7 @@ fn trans_estr(bcx: block, s: @str, vstore: ast::vstore, let c = alt vstore { ast::vstore_fixed(_) { - // "hello"/_ => "hello"/5 => [i8 x 6] in llvm + // "hello"/_ => "hello"/5 => [i8 x 6]/~ in llvm #debug("trans_estr: fixed: %s", *s); C_postr(*s) } @@ -282,7 +282,7 @@ fn trans_estr(bcx: block, s: @str, vstore: ast::vstore, ast::vstore_uniq { let cs = PointerCast(bcx, C_cstr(ccx, *s), T_ptr(T_i8())); let len = C_uint(ccx, str::len(*s)); - let c = Call(bcx, ccx.upcalls.str_new_uniq, [cs, len]); + let c = Call(bcx, ccx.upcalls.str_new_uniq, [cs, len]/~); PointerCast(bcx, c, T_unique_ptr(T_unique(ccx, T_vec(ccx, T_i8())))) } @@ -290,7 +290,7 @@ fn trans_estr(bcx: block, s: @str, vstore: ast::vstore, ast::vstore_box { let cs = PointerCast(bcx, C_cstr(ccx, *s), T_ptr(T_i8())); let len = C_uint(ccx, str::len(*s)); - let c = Call(bcx, ccx.upcalls.str_new_shared, [cs, len]); + let c = Call(bcx, ccx.upcalls.str_new_shared, [cs, len]/~); PointerCast(bcx, c, T_box_ptr(T_box(ccx, T_vec(ccx, T_i8())))) } @@ -317,7 +317,7 @@ fn trans_append(bcx: block, vec_ty: ty::t, lhsptr: ValueRef, let opaque_lhs = PointerCast(bcx, lhsptr, T_ptr(T_ptr(T_i8()))); Call(bcx, ccx.upcalls.vec_grow, - [opaque_lhs, new_fill]); + [opaque_lhs, new_fill]/~); // Was overwritten if we resized let lhs = Load(bcx, lhsptr); let rhs = Select(bcx, self_append, lhs, rhs); @@ -333,14 +333,14 @@ fn trans_append(bcx: block, vec_ty: ty::t, lhsptr: ValueRef, let write_ptr = Load(bcx, write_ptr_ptr); let bcx = copy_val(bcx, INIT, write_ptr, load_if_immediate(bcx, addr, unit_ty), unit_ty); - Store(bcx, InBoundsGEP(bcx, write_ptr, [C_int(ccx, 1)]), + Store(bcx, InBoundsGEP(bcx, write_ptr, [C_int(ccx, 1)]/~), write_ptr_ptr); bcx }) } fn trans_append_literal(bcx: block, vptrptr: ValueRef, vec_ty: ty::t, - vals: [@ast::expr]) -> block { + vals: [@ast::expr]/~) -> block { let _icx = bcx.insn_ctxt("tvec::trans_append_literal"); let mut bcx = bcx, ccx = bcx.ccx(); let elt_ty = ty::sequence_element_type(bcx.tcx(), vec_ty); @@ -357,7 +357,7 @@ fn trans_append_literal(bcx: block, vptrptr: ValueRef, vec_ty: ty::t, bcx = base::with_cond(bcx, do_grow) {|bcx| let pt = PointerCast(bcx, vptrptr, T_ptr(T_ptr(T_i8()))); - Call(bcx, ccx.upcalls.vec_grow, [pt, new_fill]); + Call(bcx, ccx.upcalls.vec_grow, [pt, new_fill]/~); bcx }; let vptr = get_bodyptr(bcx, Load(bcx, vptrptr)); @@ -380,7 +380,7 @@ fn trans_add(bcx: block, vec_ty: ty::t, lhs: ValueRef, if ty::get(vec_ty).struct == 
ty::ty_str { let lhs = PointerCast(bcx, lhs, T_ptr(T_i8())); let rhs = PointerCast(bcx, rhs, T_ptr(T_i8())); - let n = Call(bcx, ccx.upcalls.str_concat, [lhs, rhs]); + let n = Call(bcx, ccx.upcalls.str_concat, [lhs, rhs]/~); let n = PointerCast( bcx, n, T_unique_ptr(T_unique(ccx, T_vec(ccx, llunitty)))); ret base::store_in_dest(bcx, n, dest); @@ -401,7 +401,7 @@ fn trans_add(bcx: block, vec_ty: ty::t, lhs: ValueRef, let write_ptr = Load(bcx, write_ptr_ptr); let bcx = copy_val(bcx, INIT, write_ptr, load_if_immediate(bcx, addr, unit_ty), unit_ty); - Store(bcx, InBoundsGEP(bcx, write_ptr, [C_int(ccx, 1)]), + Store(bcx, InBoundsGEP(bcx, write_ptr, [C_int(ccx, 1)]/~), write_ptr_ptr); ret bcx; }; @@ -430,7 +430,8 @@ fn iter_vec_raw(bcx: block, data_ptr: ValueRef, vec_ty: ty::t, // Now perform the iteration. let header_cx = sub_block(bcx, "iter_vec_loop_header"); Br(bcx, header_cx.llbb); - let data_ptr = Phi(header_cx, val_ty(data_ptr), [data_ptr], [bcx.llbb]); + let data_ptr = + Phi(header_cx, val_ty(data_ptr), [data_ptr]/~, [bcx.llbb]/~); let not_yet_at_end = ICmp(header_cx, lib::llvm::IntULT, data_ptr, data_end_ptr); let body_cx = sub_block(header_cx, "iter_vec_loop_body"); @@ -438,7 +439,7 @@ fn iter_vec_raw(bcx: block, data_ptr: ValueRef, vec_ty: ty::t, CondBr(header_cx, not_yet_at_end, body_cx.llbb, next_cx.llbb); let body_cx = f(body_cx, data_ptr, unit_ty); AddIncomingToPhi(data_ptr, InBoundsGEP(body_cx, data_ptr, - [C_int(bcx.ccx(), 1)]), + [C_int(bcx.ccx(), 1)]/~), body_cx.llbb); Br(body_cx, header_cx.llbb); ret next_cx; diff --git a/src/rustc/middle/trans/type_of.rs b/src/rustc/middle/trans/type_of.rs index 2b610a99662..db459ddae00 100644 --- a/src/rustc/middle/trans/type_of.rs +++ b/src/rustc/middle/trans/type_of.rs @@ -14,7 +14,8 @@ export type_of_fn_from_ty; export type_of_fn; export type_of_non_gc_box; -fn type_of_explicit_args(cx: @crate_ctxt, inputs: [ty::arg]) -> [TypeRef] { +fn type_of_explicit_args(cx: @crate_ctxt, + inputs: [ty::arg]/~) -> [TypeRef]/~ { vec::map(inputs) {|arg| let arg_ty = arg.ty; let llty = type_of(cx, arg_ty); @@ -25,14 +26,15 @@ fn type_of_explicit_args(cx: @crate_ctxt, inputs: [ty::arg]) -> [TypeRef] { } } -fn type_of_fn(cx: @crate_ctxt, inputs: [ty::arg], output: ty::t) -> TypeRef { - let mut atys: [TypeRef] = []; +fn type_of_fn(cx: @crate_ctxt, inputs: [ty::arg]/~, + output: ty::t) -> TypeRef { + let mut atys: [TypeRef]/~ = []/~; // Arg 0: Output pointer. - atys += [T_ptr(type_of(cx, output))]; + atys += [T_ptr(type_of(cx, output))]/~; // Arg 1: Environment - atys += [T_opaque_box_ptr(cx)]; + atys += [T_opaque_box_ptr(cx)]/~; // ... then explicit args. 
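The comments kept in this hunk spell out the calling convention every Rust function lowers to at this stage: argument 0 is an out-pointer the callee writes its result through, argument 1 is the environment (closure box), and the source-level arguments follow. A sketch of one function in that lowered form is below; the names are made up, and current rustc does not lower functions this way.

```rust
// Sketch of the lowered signature type_of_fn builds above:
//   arg 0 = out-pointer for the return value,
//   arg 1 = environment / closure box,
//   then the declared arguments.
struct Env { bias: i32 }

// Source level: a closure `|a, b| a + b + bias` returning i32.
fn add_biased_lowered(out: *mut i32, env: *const Env, a: i32, b: i32) {
    unsafe { *out = a + b + (*env).bias; }
}

fn main() {
    let env = Env { bias: 1 };
    let mut result = 0;
    add_biased_lowered(&mut result, &env, 2, 39);
    println!("{}", result); // 42
}
```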
atys += type_of_explicit_args(cx, inputs); @@ -115,12 +117,12 @@ fn type_of(cx: @crate_ctxt, t: ty::t) -> TypeRef { ty::ty_evec(mt, ty::vstore_slice(_)) { T_struct([T_ptr(type_of(cx, mt.ty)), - T_uint_ty(cx, ast::ty_u)]) + T_uint_ty(cx, ast::ty_u)]/~) } ty::ty_estr(ty::vstore_slice(_)) { T_struct([T_ptr(T_i8()), - T_uint_ty(cx, ast::ty_u)]) + T_uint_ty(cx, ast::ty_u)]/~) } ty::ty_estr(ty::vstore_fixed(n)) { @@ -132,10 +134,10 @@ fn type_of(cx: @crate_ctxt, t: ty::t) -> TypeRef { } ty::ty_rec(fields) { - let mut tys: [TypeRef] = []; + let mut tys: [TypeRef]/~ = []/~; for vec::each(fields) {|f| let mt_ty = f.mt.ty; - tys += [type_of(cx, mt_ty)]; + tys += [type_of(cx, mt_ty)]/~; } T_struct(tys) } @@ -143,9 +145,9 @@ fn type_of(cx: @crate_ctxt, t: ty::t) -> TypeRef { ty::ty_iface(_, _) { T_opaque_iface(cx) } ty::ty_type { T_ptr(cx.tydesc_type) } ty::ty_tup(elts) { - let mut tys = []; + let mut tys = []/~; for vec::each(elts) {|elt| - tys += [type_of(cx, elt)]; + tys += [type_of(cx, elt)]/~; } T_struct(tys) } @@ -180,7 +182,7 @@ fn type_of(cx: @crate_ctxt, t: ty::t) -> TypeRef { if ty::ty_dtor(cx.tcx, did) != none { // resource type - tys = [T_i8(), T_struct(tys)]; + tys = [T_i8(), T_struct(tys)]/~; } common::set_struct_body(llty, tys); @@ -213,13 +215,13 @@ fn type_of_enum(cx: @crate_ctxt, did: ast::def_id, t: ty::t) let degen = (*ty::enum_variants(cx.tcx, did)).len() == 1u; let size = shape::static_size_of_enum(cx, t); if !degen { - [T_enum_discrim(cx), T_array(T_i8(), size)] + [T_enum_discrim(cx), T_array(T_i8(), size)]/~ } else if size == 0u { - [T_enum_discrim(cx)] + [T_enum_discrim(cx)]/~ } else { - [T_array(T_i8(), size)] + [T_array(T_i8(), size)]/~ } }; @@ -250,7 +252,7 @@ fn llvm_type_name(cx: @crate_ctxt, t: ty::t) -> str { fn type_of_dtor(ccx: @crate_ctxt, self_ty: ty::t) -> TypeRef { T_fn([T_ptr(type_of(ccx, ty::mk_nil(ccx.tcx))), - T_ptr(type_of(ccx, self_ty))], + T_ptr(type_of(ccx, self_ty))]/~, llvm::LLVMVoidType()) } diff --git a/src/rustc/middle/trans/type_use.rs b/src/rustc/middle/trans/type_use.rs index fb0b983ae65..325647bfe44 100644 --- a/src/rustc/middle/trans/type_use.rs +++ b/src/rustc/middle/trans/type_use.rs @@ -31,10 +31,10 @@ const use_repr: uint = 1u; // Dependency on size/alignment and take/drop glue const use_tydesc: uint = 2u; // Takes the tydesc, or compares type ctx = {ccx: @crate_ctxt, - uses: [mut type_uses]}; + uses: [mut type_uses]/~}; fn type_uses_for(ccx: @crate_ctxt, fn_id: def_id, n_tps: uint) - -> [type_uses] { + -> [type_uses]/~ { alt ccx.type_use_cache.find(fn_id) { some(uses) { ret uses; } none {} diff --git a/src/rustc/middle/trans/uniq.rs b/src/rustc/middle/trans/uniq.rs index a3325fc4904..ec4fd7680ba 100644 --- a/src/rustc/middle/trans/uniq.rs +++ b/src/rustc/middle/trans/uniq.rs @@ -56,9 +56,9 @@ fn duplicate(bcx: block, v: ValueRef, t: ty::t) -> result { let bcx = copy_val(bcx, INIT, dst_body, src_body, content_ty); let src_tydesc_ptr = GEPi(bcx, src_box, - [0u, back::abi::box_field_tydesc]); + [0u, back::abi::box_field_tydesc]/~); let dst_tydesc_ptr = GEPi(bcx, dst_box, - [0u, back::abi::box_field_tydesc]); + [0u, back::abi::box_field_tydesc]/~); let td = Load(bcx, src_tydesc_ptr); Store(bcx, td, dst_tydesc_ptr); diff --git a/src/rustc/middle/tstate/annotate.rs b/src/rustc/middle/tstate/annotate.rs index d20cbcfe4d4..0427e480181 100644 --- a/src/rustc/middle/tstate/annotate.rs +++ b/src/rustc/middle/tstate/annotate.rs @@ -7,13 +7,13 @@ import aux::{num_constraints, get_fn_info, crate_ctxt, add_node}; import ann::empty_ann; import 
pat_util::pat_binding_ids; -fn collect_ids_expr(e: @expr, rs: @mut [node_id]) { vec::push(*rs, e.id); } +fn collect_ids_expr(e: @expr, rs: @mut [node_id]/~) { vec::push(*rs, e.id); } -fn collect_ids_block(b: blk, rs: @mut [node_id]) { +fn collect_ids_block(b: blk, rs: @mut [node_id]/~) { vec::push(*rs, b.node.id); } -fn collect_ids_stmt(s: @stmt, rs: @mut [node_id]) { +fn collect_ids_stmt(s: @stmt, rs: @mut [node_id]/~) { alt s.node { stmt_decl(_, id) | stmt_expr(_, id) | stmt_semi(_, id) { #debug["node_id %s", int::str(id)]; @@ -23,11 +23,11 @@ fn collect_ids_stmt(s: @stmt, rs: @mut [node_id]) { } } -fn collect_ids_local(tcx: ty::ctxt, l: @local, rs: @mut [node_id]) { +fn collect_ids_local(tcx: ty::ctxt, l: @local, rs: @mut [node_id]/~) { *rs += pat_binding_ids(tcx.def_map, l.node.pat); } -fn node_ids_in_fn(tcx: ty::ctxt, body: blk, rs: @mut [node_id]) { +fn node_ids_in_fn(tcx: ty::ctxt, body: blk, rs: @mut [node_id]/~) { let collect_ids = visit::mk_simple_visitor(@{visit_expr: {|a|collect_ids_expr(a, rs)}, visit_block: {|a|collect_ids_block(a, rs)}, @@ -38,7 +38,7 @@ fn node_ids_in_fn(tcx: ty::ctxt, body: blk, rs: @mut [node_id]) { collect_ids.visit_block(body, (), collect_ids); } -fn init_vecs(ccx: crate_ctxt, node_ids: [node_id], len: uint) { +fn init_vecs(ccx: crate_ctxt, node_ids: [node_id]/~, len: uint) { for node_ids.each {|i| log(debug, int::str(i) + " |-> " + uint::str(len)); add_node(ccx, i, empty_ann(len)); @@ -46,7 +46,7 @@ fn init_vecs(ccx: crate_ctxt, node_ids: [node_id], len: uint) { } fn visit_fn(ccx: crate_ctxt, num_constraints: uint, body: blk) { - let node_ids: @mut [node_id] = @mut []; + let node_ids: @mut [node_id]/~ = @mut []/~; node_ids_in_fn(ccx.tcx, body, node_ids); let node_id_vec = *node_ids; init_vecs(ccx, node_id_vec, num_constraints); diff --git a/src/rustc/middle/tstate/auxiliary.rs b/src/rustc/middle/tstate/auxiliary.rs index 5fb79dc1318..eee67f1a1f7 100644 --- a/src/rustc/middle/tstate/auxiliary.rs +++ b/src/rustc/middle/tstate/auxiliary.rs @@ -34,7 +34,7 @@ fn def_id_to_str(d: def_id) -> str { ret int::str(d.crate) + "," + int::str(d.node); } -fn comma_str(args: [@constr_arg_use]) -> str { +fn comma_str(args: [@constr_arg_use]/~) -> str { let mut rslt = ""; let mut comma = false; for args.each {|a| @@ -93,7 +93,7 @@ fn log_tritv_err(fcx: fn_ctxt, v: tritv::t) { log(error, tritv_to_str(fcx, v)); } -fn tos(v: [uint]) -> str { +fn tos(v: [uint]/~) -> str { let mut rslt = ""; for v.each {|i| if i == 0u { @@ -103,9 +103,9 @@ fn tos(v: [uint]) -> str { ret rslt; } -fn log_cond(v: [uint]) { log(debug, tos(v)); } +fn log_cond(v: [uint]/~) { log(debug, tos(v)); } -fn log_cond_err(v: [uint]) { log(error, tos(v)); } +fn log_cond_err(v: [uint]/~) { log(error, tos(v)); } fn log_pp(pp: pre_and_post) { let p1 = tritv::to_vec(pp.precondition); @@ -145,7 +145,7 @@ fn log_states_err(pp: pre_and_post_state) { fn print_ident(i: ident) { log(debug, " " + *i + " "); } -fn print_idents(&idents: [ident]) { +fn print_idents(&idents: [ident]/~) { if vec::len::(idents) == 0u { ret; } log(debug, "an ident: " + *vec::pop::(idents)); print_idents(idents); @@ -180,7 +180,7 @@ to represent predicate *arguments* however. This type Both types store an ident and span, for error-logging purposes. 
*/ -type pred_args_ = {args: [@constr_arg_use], bit_num: uint}; +type pred_args_ = {args: [@constr_arg_use]/~, bit_num: uint}; type pred_args = spanned; @@ -203,7 +203,7 @@ type constraint = { type tsconstr = { path: @path, def_id: def_id, - args: [@constr_arg_use] + args: [@constr_arg_use]/~ }; type sp_constr = spanned; @@ -224,11 +224,11 @@ type fn_info = {constrs: constr_map, num_constraints: uint, cf: ret_style, - used_vars: @mut [node_id], + used_vars: @mut [node_id]/~, ignore: bool}; /* mapping from node ID to typestate annotation */ -type node_ann_table = @mut [mut ts_ann]; +type node_ann_table = @mut [mut ts_ann]/~; /* mapping from function name to fn_info map */ @@ -436,7 +436,7 @@ fn pure_exp(ccx: crate_ctxt, id: node_id, p: prestate) -> bool { fn num_constraints(m: fn_info) -> uint { ret m.num_constraints; } fn new_crate_ctxt(cx: ty::ctxt) -> crate_ctxt { - let na: [mut ts_ann] = [mut]; + let na: [mut ts_ann]/~ = [mut]/~; ret {tcx: cx, node_anns: @mut na, fm: int_hash::()}; } @@ -450,10 +450,10 @@ fn controlflow_expr(ccx: crate_ctxt, e: @expr) -> ret_style { } } -fn constraints_expr(cx: ty::ctxt, e: @expr) -> [@ty::constr] { +fn constraints_expr(cx: ty::ctxt, e: @expr) -> [@ty::constr]/~ { alt ty::get(ty::node_id_to_type(cx, e.id)).struct { ty::ty_fn(f) { ret f.constraints; } - _ { ret []; } + _ { ret []/~; } } } @@ -471,14 +471,14 @@ fn node_id_to_def(ccx: crate_ctxt, id: node_id) -> option { ret ccx.tcx.def_map.find(id); } -fn norm_a_constraint(id: def_id, c: constraint) -> [norm_constraint] { - let mut rslt: [norm_constraint] = []; +fn norm_a_constraint(id: def_id, c: constraint) -> [norm_constraint]/~ { + let mut rslt: [norm_constraint]/~ = []/~; for (*c.descs).each {|pd| rslt += [{bit_num: pd.node.bit_num, c: respan(pd.span, {path: c.path, def_id: id, - args: pd.node.args})}]; + args: pd.node.args})}]/~; } ret rslt; } @@ -486,8 +486,8 @@ fn norm_a_constraint(id: def_id, c: constraint) -> [norm_constraint] { // Tried to write this as an iterator, but I got a // non-exhaustive match in trans. -fn constraints(fcx: fn_ctxt) -> [norm_constraint] { - let mut rslt: [norm_constraint] = []; +fn constraints(fcx: fn_ctxt) -> [norm_constraint]/~ { + let mut rslt: [norm_constraint]/~ = []/~; for fcx.enclosing.constrs.each {|key, val| rslt += norm_a_constraint(key, val); }; @@ -497,7 +497,7 @@ fn constraints(fcx: fn_ctxt) -> [norm_constraint] { // FIXME (#2539): Would rather take an immutable vec as an argument, // should freeze it at some earlier point. 
fn match_args(fcx: fn_ctxt, occs: @dvec, - occ: [@constr_arg_use]) -> uint { + occ: [@constr_arg_use]/~) -> uint { #debug("match_args: looking at %s", constr_args_to_str(fn@(i: inst) -> str { ret *i.ident; }, occ)); for (*occs).each {|pd| @@ -549,10 +549,11 @@ fn expr_to_constr_arg(tcx: ty::ctxt, e: @expr) -> @constr_arg_use { } } -fn exprs_to_constr_args(tcx: ty::ctxt, args: [@expr]) -> [@constr_arg_use] { +fn exprs_to_constr_args(tcx: ty::ctxt, + args: [@expr]/~) -> [@constr_arg_use]/~ { let f = {|a|expr_to_constr_arg(tcx, a)}; - let mut rslt: [@constr_arg_use] = []; - for args.each {|e| rslt += [f(e)]; } + let mut rslt: [@constr_arg_use]/~ = []/~; + for args.each {|e| rslt += [f(e)]/~; } rslt } @@ -584,18 +585,18 @@ fn pred_args_to_str(p: pred_args) -> str { + ">" } -fn substitute_constr_args(cx: ty::ctxt, actuals: [@expr], c: @ty::constr) -> +fn substitute_constr_args(cx: ty::ctxt, actuals: [@expr]/~, c: @ty::constr) -> tsconstr { - let mut rslt: [@constr_arg_use] = []; + let mut rslt: [@constr_arg_use]/~ = []/~; for c.node.args.each {|a| - rslt += [substitute_arg(cx, actuals, a)]; + rslt += [substitute_arg(cx, actuals, a)]/~; } ret {path: c.node.path, def_id: c.node.id, args: rslt}; } -fn substitute_arg(cx: ty::ctxt, actuals: [@expr], a: @constr_arg) -> +fn substitute_arg(cx: ty::ctxt, actuals: [@expr]/~, a: @constr_arg) -> @constr_arg_use { let num_actuals = vec::len(actuals); alt a.node { @@ -611,7 +612,7 @@ fn substitute_arg(cx: ty::ctxt, actuals: [@expr], a: @constr_arg) -> } } -fn pred_args_matches(pattern: [constr_arg_general_], +fn pred_args_matches(pattern: [constr_arg_general_]/~, desc: pred_args) -> bool { let mut i = 0u; @@ -637,8 +638,8 @@ fn pred_args_matches(pattern: [constr_arg_general_], ret true; } -fn find_instance_(pattern: [constr_arg_general_], - descs: [pred_args]) -> +fn find_instance_(pattern: [constr_arg_general_]/~, + descs: [pred_args]/~) -> option { for descs.each {|d| if pred_args_matches(pattern, d) { ret some(d.node.bit_num); } @@ -653,13 +654,13 @@ enum dest { call // RHS is passed to a function } -type subst = [{from: inst, to: inst}]; +type subst = [{from: inst, to: inst}]/~; fn find_instances(_fcx: fn_ctxt, subst: subst, - c: constraint) -> [{from: uint, to: uint}] { + c: constraint) -> [{from: uint, to: uint}]/~ { - if vec::len(subst) == 0u { ret []; } - let mut res = []; + if vec::len(subst) == 0u { ret []/~; } + let mut res = []/~; (*c.descs).swap { |v| let v <- vec::from_mut(v); for v.each { |d| @@ -667,7 +668,7 @@ fn find_instances(_fcx: fn_ctxt, subst: subst, let old_bit_num = d.node.bit_num; let newv = replace(subst, d); alt find_instance_(newv, v) { - some(d1) {res += [{from: old_bit_num, to: d1}]} + some(d1) {res += [{from: old_bit_num, to: d1}]/~} _ {} } } else {} @@ -688,7 +689,7 @@ fn find_in_subst_bool(s: subst, id: node_id) -> bool { is_some(find_in_subst(id, s)) } -fn insts_to_str(stuff: [constr_arg_general_]) -> str { +fn insts_to_str(stuff: [constr_arg_general_]/~) -> str { let mut rslt = "<"; for stuff.each {|i| rslt += @@ -696,25 +697,25 @@ fn insts_to_str(stuff: [constr_arg_general_]) -> str { alt i { carg_ident(p) { *p.ident } carg_base { "*" } - carg_lit(_) { "[lit]" } + carg_lit(_) { "[lit]/~" } } + " "; } rslt += ">"; rslt } -fn replace(subst: subst, d: pred_args) -> [constr_arg_general_] { - let mut rslt: [constr_arg_general_] = []; +fn replace(subst: subst, d: pred_args) -> [constr_arg_general_]/~ { + let mut rslt: [constr_arg_general_]/~ = []/~; for d.node.args.each {|c| alt c.node { carg_ident(p) { alt 
find_in_subst(p.node, subst) { - some(newv) { rslt += [carg_ident(newv)]; } - _ { rslt += [c.node]; } + some(newv) { rslt += [carg_ident(newv)]/~; } + _ { rslt += [c.node]/~; } } } _ { - rslt += [c.node]; + rslt += [c.node]/~; } } } @@ -796,11 +797,11 @@ fn copy_in_poststate_two(fcx: fn_ctxt, src_post: poststate, ty: oper_type) { let mut subst; alt ty { - oper_swap { subst = [{from: dest, to: src}, {from: src, to: dest}]; } + oper_swap { subst = [{from: dest, to: src}, {from: src, to: dest}]/~; } oper_assign_op { ret; // Don't do any propagation } - _ { subst = [{from: src, to: dest}]; } + _ { subst = [{from: src, to: dest}]/~; } } @@ -845,19 +846,19 @@ fn forget_in_poststate(fcx: fn_ctxt, p: poststate, dead_v: node_id) -> bool { ret changed; } -fn any_eq(v: [node_id], d: node_id) -> bool { +fn any_eq(v: [node_id]/~, d: node_id) -> bool { for v.each {|i| if i == d { ret true; } } false } fn constraint_mentions(_fcx: fn_ctxt, c: norm_constraint, v: node_id) -> bool { - ret args_mention(c.c.node.args, any_eq, [v]); + ret args_mention(c.c.node.args, any_eq, [v]/~); } -fn args_mention(args: [@constr_arg_use], - q: fn([T], node_id) -> bool, - s: [T]) -> bool { +fn args_mention(args: [@constr_arg_use]/~, + q: fn([T]/~, node_id) -> bool, + s: [T]/~) -> bool { for args.each {|a| alt a.node { carg_ident(p1) { if q(s, p1.node) { ret true; } } _ { } } @@ -865,7 +866,7 @@ fn args_mention(args: [@constr_arg_use], ret false; } -fn use_var(fcx: fn_ctxt, v: node_id) { *fcx.enclosing.used_vars += [v]; } +fn use_var(fcx: fn_ctxt, v: node_id) { *fcx.enclosing.used_vars += [v]/~; } fn op_to_oper_ty(io: init_op) -> oper_type { alt io { init_move { oper_move } _ { oper_assign } } @@ -878,9 +879,10 @@ fn do_nothing(_fk: visit::fn_kind, _decl: fn_decl, _body: blk, } -fn args_to_constr_args(tcx: ty::ctxt, args: [arg], - indices: [@sp_constr_arg]) -> [@constr_arg_use] { - let mut actuals: [@constr_arg_use] = []; +fn args_to_constr_args(tcx: ty::ctxt, args: [arg]/~, + indices: [@sp_constr_arg]/~) + -> [@constr_arg_use]/~ { + let mut actuals: [@constr_arg_use]/~ = []/~; let num_args = vec::len(args); for indices.each {|a| actuals += @@ -898,12 +900,12 @@ fn args_to_constr_args(tcx: ty::ctxt, args: [arg], } } carg_lit(l) { carg_lit(l) } - })]; + })]/~; } ret actuals; } -fn ast_constr_to_ts_constr(tcx: ty::ctxt, args: [arg], c: @constr) -> +fn ast_constr_to_ts_constr(tcx: ty::ctxt, args: [arg]/~, c: @constr) -> tsconstr { let tconstr = ty::ast_constr_to_constr(tcx, c); ret {path: tconstr.node.path, @@ -911,35 +913,35 @@ fn ast_constr_to_ts_constr(tcx: ty::ctxt, args: [arg], c: @constr) -> args: args_to_constr_args(tcx, args, tconstr.node.args)}; } -fn ast_constr_to_sp_constr(tcx: ty::ctxt, args: [arg], c: @constr) -> +fn ast_constr_to_sp_constr(tcx: ty::ctxt, args: [arg]/~, c: @constr) -> sp_constr { let tconstr = ast_constr_to_ts_constr(tcx, args, c); ret respan(c.span, tconstr); } -type binding = {lhs: [dest], rhs: option}; +type binding = {lhs: [dest]/~, rhs: option}; fn local_to_bindings(tcx: ty::ctxt, loc: @local) -> binding { - let mut lhs = []; + let mut lhs = []/~; pat_bindings(tcx.def_map, loc.node.pat) {|p_id, _s, name| - lhs += [local_dest({ident: path_to_ident(name), node: p_id})]; + lhs += [local_dest({ident: path_to_ident(name), node: p_id})]/~; }; {lhs: lhs, rhs: loc.node.init} } -fn locals_to_bindings(tcx: ty::ctxt, locals: [@local]) -> [binding] { - let mut rslt = []; - for locals.each {|loc| rslt += [local_to_bindings(tcx, loc)]; } +fn locals_to_bindings(tcx: ty::ctxt, locals: [@local]/~) -> 
[binding]/~ { + let mut rslt = []/~; + for locals.each {|loc| rslt += [local_to_bindings(tcx, loc)]/~; } ret rslt; } -fn callee_modes(fcx: fn_ctxt, callee: node_id) -> [mode] { +fn callee_modes(fcx: fn_ctxt, callee: node_id) -> [mode]/~ { let ty = ty::type_autoderef(fcx.ccx.tcx, ty::node_id_to_type(fcx.ccx.tcx, callee)); alt ty::get(ty).struct { ty::ty_fn({inputs: args, _}) { - let mut modes = []; - for args.each {|arg| modes += [arg.mode]; } + let mut modes = []/~; + for args.each {|arg| modes += [arg.mode]/~; } ret modes; } _ { @@ -950,7 +952,7 @@ fn callee_modes(fcx: fn_ctxt, callee: node_id) -> [mode] { } } -fn callee_arg_init_ops(fcx: fn_ctxt, callee: node_id) -> [init_op] { +fn callee_arg_init_ops(fcx: fn_ctxt, callee: node_id) -> [init_op]/~ { vec::map(callee_modes(fcx, callee)) {|m| alt ty::resolved_mode(fcx.ccx.tcx, m) { by_move { init_move } @@ -959,11 +961,12 @@ fn callee_arg_init_ops(fcx: fn_ctxt, callee: node_id) -> [init_op] { } } -fn arg_bindings(ops: [init_op], es: [@expr]) -> [binding] { - let mut bindings: [binding] = []; +fn arg_bindings(ops: [init_op]/~, es: [@expr]/~) -> [binding]/~ { + let mut bindings: [binding]/~ = []/~; let mut i = 0u; for ops.each {|op| - bindings += [{lhs: [call], rhs: some({op: op, expr: es[i]})}]; + vec::push(bindings, + {lhs: [call]/~, rhs: some({op: op, expr: es[i]})}); i += 1u; } ret bindings; diff --git a/src/rustc/middle/tstate/bitvectors.rs b/src/rustc/middle/tstate/bitvectors.rs index 4524684bc60..c1ca98d5f90 100644 --- a/src/rustc/middle/tstate/bitvectors.rs +++ b/src/rustc/middle/tstate/bitvectors.rs @@ -45,7 +45,7 @@ fn seq_tritv(p: postcond, q: postcond) { } } -fn seq_postconds(fcx: fn_ctxt, ps: [postcond]) -> postcond { +fn seq_postconds(fcx: fn_ctxt, ps: [postcond]/~) -> postcond { let sz = vec::len(ps); if sz >= 1u { let prev = tritv_clone(ps[0]); @@ -58,11 +58,11 @@ fn seq_postconds(fcx: fn_ctxt, ps: [postcond]) -> postcond { // return the precondition for evaluating each expr in order. // So, if e0's post is {x} and e1's pre is {x, y, z}, the entire // precondition shouldn't include x. -fn seq_preconds(fcx: fn_ctxt, pps: [pre_and_post]) -> precond { +fn seq_preconds(fcx: fn_ctxt, pps: [pre_and_post]/~) -> precond { let sz: uint = vec::len(pps); let num_vars: uint = num_constraints(fcx.enclosing); - fn seq_preconds_go(fcx: fn_ctxt, pps: [pre_and_post], + fn seq_preconds_go(fcx: fn_ctxt, pps: [pre_and_post]/~, idx: uint, first: pre_and_post) -> precond { let mut idx = idx; diff --git a/src/rustc/middle/tstate/collect_locals.rs b/src/rustc/middle/tstate/collect_locals.rs index 33a5b1ab0b0..bf22f4287bb 100644 --- a/src/rustc/middle/tstate/collect_locals.rs +++ b/src/rustc/middle/tstate/collect_locals.rs @@ -10,12 +10,12 @@ import aux::*; import std::map::hashmap; import dvec::{dvec, extensions}; -type ctxt = {cs: @mut [sp_constr], tcx: ty::ctxt}; +type ctxt = {cs: @mut [sp_constr]/~, tcx: ty::ctxt}; fn collect_pred(e: @expr, cx: ctxt, v: visit::vt) { alt e.node { - expr_check(_, ch) { *cx.cs += [expr_to_constr(cx.tcx, ch)]; } - expr_if_check(ex, _, _) { *cx.cs += [expr_to_constr(cx.tcx, ex)]; } + expr_check(_, ch) { *cx.cs += [expr_to_constr(cx.tcx, ch)]/~; } + expr_if_check(ex, _, _) { *cx.cs += [expr_to_constr(cx.tcx, ex)]/~; } // If it's a call, generate appropriate instances of the // call's constraints. 
@@ -24,7 +24,7 @@ fn collect_pred(e: @expr, cx: ctxt, v: visit::vt) { let ct: sp_constr = respan(c.span, aux::substitute_constr_args(cx.tcx, operands, c)); - *cx.cs += [ct]; + *cx.cs += [ct]/~; } } _ { } @@ -39,7 +39,7 @@ fn find_locals(tcx: ty::ctxt, f_body: blk, sp: span, id: node_id) -> ctxt { - let cx: ctxt = {cs: @mut [], tcx: tcx}; + let cx: ctxt = {cs: @mut []/~, tcx: tcx}; let visitor = visit::default_visitor::(); let visitor = @{visit_expr: collect_pred, @@ -130,7 +130,7 @@ fn mk_fn_info(ccx: crate_ctxt, } } - let v: @mut [node_id] = @mut []; + let v: @mut [node_id]/~ = @mut []/~; let rslt = {constrs: res_map, num_constraints: next, diff --git a/src/rustc/middle/tstate/pre_post_conditions.rs b/src/rustc/middle/tstate/pre_post_conditions.rs index aa7a3d6e344..cc157ff4356 100644 --- a/src/rustc/middle/tstate/pre_post_conditions.rs +++ b/src/rustc/middle/tstate/pre_post_conditions.rs @@ -63,7 +63,7 @@ fn find_pre_post_item(ccx: crate_ctxt, i: item) { sets the precondition in a to be the result of combining the preconditions for , and the postcondition in a to be the union of all postconditions for */ -fn find_pre_post_exprs(fcx: fn_ctxt, args: [@expr], id: node_id) { +fn find_pre_post_exprs(fcx: fn_ctxt, args: [@expr]/~, id: node_id) { if vec::len::<@expr>(args) > 0u { #debug["find_pre_post_exprs: oper = %s", expr_to_str(args[0])]; } @@ -84,7 +84,8 @@ fn find_pre_post_loop(fcx: fn_ctxt, index: @expr, body: blk, id: node_id) { find_pre_post_block(fcx, body); let loop_precond = - seq_preconds(fcx, [expr_pp(fcx.ccx, index), block_pp(fcx.ccx, body)]); + seq_preconds(fcx, [expr_pp(fcx.ccx, index), + block_pp(fcx.ccx, body)]/~); let loop_postcond = intersect_states(expr_postcond(fcx.ccx, index), block_postcond(fcx.ccx, body)); @@ -111,7 +112,7 @@ fn join_then_else(fcx: fn_ctxt, antec: @expr, conseq: blk, let precond_res = seq_preconds(fcx, [expr_pp(fcx.ccx, antec), - block_pp(fcx.ccx, conseq)]); + block_pp(fcx.ccx, conseq)]/~); set_pre_and_post(fcx.ccx, id, precond_res, expr_poststate(fcx.ccx, antec)); } @@ -124,11 +125,12 @@ fn join_then_else(fcx: fn_ctxt, antec: @expr, conseq: blk, find_pre_post_expr(fcx, altern); let precond_false_case = seq_preconds(fcx, - [expr_pp(fcx.ccx, antec), expr_pp(fcx.ccx, altern)]); + [expr_pp(fcx.ccx, antec), + expr_pp(fcx.ccx, altern)]/~); let postcond_false_case = seq_postconds(fcx, [expr_postcond(fcx.ccx, antec), - expr_postcond(fcx.ccx, altern)]); + expr_postcond(fcx.ccx, altern)]/~); /* Be sure to set the bit for the check condition here, so that it's *not* set in the alternative. 
*/ @@ -142,14 +144,14 @@ fn join_then_else(fcx: fn_ctxt, antec: @expr, conseq: blk, let precond_true_case = seq_preconds(fcx, [expr_pp(fcx.ccx, antec), - block_pp(fcx.ccx, conseq)]); + block_pp(fcx.ccx, conseq)]/~); let postcond_true_case = seq_postconds(fcx, [expr_postcond(fcx.ccx, antec), - block_postcond(fcx.ccx, conseq)]); + block_postcond(fcx.ccx, conseq)]/~); let precond_res = - seq_postconds(fcx, [precond_true_case, precond_false_case]); + seq_postconds(fcx, [precond_true_case, precond_false_case]/~); let postcond_res = intersect_states(postcond_true_case, postcond_false_case); set_pre_and_post(fcx.ccx, id, precond_res, postcond_res); @@ -168,10 +170,10 @@ fn gen_if_local(fcx: fn_ctxt, lhs: @expr, rhs: @expr, larger_id: node_id, set_pre_and_post(fcx.ccx, larger_id, p.precondition, p.postcondition); } - _ { find_pre_post_exprs(fcx, [lhs, rhs], larger_id); } + _ { find_pre_post_exprs(fcx, [lhs, rhs]/~, larger_id); } } } - _ { find_pre_post_exprs(fcx, [lhs, rhs], larger_id); } + _ { find_pre_post_exprs(fcx, [lhs, rhs]/~, larger_id); } } } @@ -226,8 +228,8 @@ fn handle_update(fcx: fn_ctxt, parent: @expr, lhs: @expr, rhs: @expr, } } -fn forget_args_moved_in(fcx: fn_ctxt, parent: @expr, modes: [mode], - operands: [@expr]) { +fn forget_args_moved_in(fcx: fn_ctxt, parent: @expr, modes: [mode]/~, + operands: [@expr]/~) { vec::iteri(modes) {|i,mode| alt ty::resolved_mode(fcx.ccx.tcx, mode) { by_move { forget_in_postcond(fcx, parent.id, operands[i].id); } @@ -253,7 +255,7 @@ fn find_pre_post_expr(fcx: fn_ctxt, e: @expr) { /* copy */ let mut args = operands; - args += [operator]; + args += [operator]/~; find_pre_post_exprs(fcx, args, e.id); /* see if the call has any constraints on its type */ @@ -285,10 +287,10 @@ fn find_pre_post_expr(fcx: fn_ctxt, e: @expr) { clear_pp(rslt); } expr_new(p, _, v) { - find_pre_post_exprs(fcx, [p, v], e.id); + find_pre_post_exprs(fcx, [p, v]/~, e.id); } expr_log(_, lvl, arg) { - find_pre_post_exprs(fcx, [lvl, arg], e.id); + find_pre_post_exprs(fcx, [lvl, arg]/~, e.id); } expr_fn(_, _, _, cap_clause) | expr_fn_block(_, _, cap_clause) { find_pre_post_expr_fn_upvars(fcx, e); @@ -312,7 +314,7 @@ fn find_pre_post_expr(fcx: fn_ctxt, e: @expr) { } expr_rec(fields, maybe_base) { let mut es = field_exprs(fields); - alt maybe_base { none {/* no-op */ } some(b) { es += [b]; } } + alt maybe_base { none {/* no-op */ } some(b) { es += [b]/~; } } find_pre_post_exprs(fcx, es, e.id); } expr_tup(elts) { find_pre_post_exprs(fcx, elts, e.id); } @@ -323,7 +325,7 @@ fn find_pre_post_expr(fcx: fn_ctxt, e: @expr) { /* Different from expr_assign in that the lhs *must* already be initialized */ - find_pre_post_exprs(fcx, [lhs, rhs], e.id); + find_pre_post_exprs(fcx, [lhs, rhs]/~, e.id); forget_in_postcond(fcx, e.id, lhs.id); } expr_lit(_) { clear_pp(expr_pp(fcx.ccx, e)); } @@ -349,11 +351,12 @@ fn find_pre_post_expr(fcx: fn_ctxt, e: @expr) { find_pre_post_expr(fcx, l); find_pre_post_expr(fcx, r); let overall_pre = - seq_preconds(fcx, [expr_pp(fcx.ccx, l), expr_pp(fcx.ccx, r)]); + seq_preconds(fcx, + [expr_pp(fcx.ccx, l), expr_pp(fcx.ccx, r)]/~); set_precondition(node_id_to_ts_ann(fcx.ccx, e.id), overall_pre); set_postcondition(node_id_to_ts_ann(fcx.ccx, e.id), expr_postcond(fcx.ccx, l)); - } else { find_pre_post_exprs(fcx, [l, r], e.id); } + } else { find_pre_post_exprs(fcx, [l, r]/~, e.id); } } expr_addr_of(_, x) | expr_cast(x, _) | expr_unary(_, x) | expr_loop_body(x) | expr_do_body(x) | expr_assert(x) | expr_copy(x) { @@ -366,7 +369,7 @@ fn find_pre_post_expr(fcx: fn_ctxt, e: @expr) { 
set_pre_and_post(fcx.ccx, e.id, seq_preconds(fcx, [expr_pp(fcx.ccx, test), - block_pp(fcx.ccx, body)]), + block_pp(fcx.ccx, body)]/~), intersect_states(expr_postcond(fcx.ccx, test), block_postcond(fcx.ccx, body))); } @@ -383,7 +386,7 @@ fn find_pre_post_expr(fcx: fn_ctxt, e: @expr) { set_pre_and_post(fcx.ccx, e.id, block_precond(fcx.ccx, body), loop_postcond); } - expr_index(val, sub) { find_pre_post_exprs(fcx, [val, sub], e.id); } + expr_index(val, sub) { find_pre_post_exprs(fcx, [val, sub]/~, e.id); } expr_alt(ex, alts, _) { find_pre_post_expr(fcx, ex); fn do_an_alt(fcx: fn_ctxt, an_alt: arm) -> pre_and_post { @@ -394,11 +397,11 @@ fn find_pre_post_expr(fcx: fn_ctxt, e: @expr) { find_pre_post_block(fcx, an_alt.body); ret block_pp(fcx.ccx, an_alt.body); } - let mut alt_pps = []; - for alts.each {|a| alt_pps += [do_an_alt(fcx, a)]; } + let mut alt_pps = []/~; + for alts.each {|a| alt_pps += [do_an_alt(fcx, a)]/~; } fn combine_pp(antec: pre_and_post, fcx: fn_ctxt, &&pp: pre_and_post, &&next: pre_and_post) -> pre_and_post { - union(pp.precondition, seq_preconds(fcx, [antec, next])); + union(pp.precondition, seq_preconds(fcx, [antec, next]/~)); intersect(pp.postcondition, next.postcondition); ret pp; } @@ -494,7 +497,7 @@ fn find_pre_post_stmt(fcx: fn_ctxt, s: stmt) { guaranteed */ let e_pp = expr_pp(fcx.ccx, an_init.expr); tritv_copy(prev_pp.precondition, - seq_preconds(fcx, [prev_pp, e_pp])); + seq_preconds(fcx, [prev_pp, e_pp]/~)); /* Include the LHSs too, since those aren't in the postconds of the RHSs themselves */ @@ -551,21 +554,21 @@ fn find_pre_post_block(fcx: fn_ctxt, b: blk) { let do_inner = {|a|do_inner_(fcx, a)}; option::map::<@expr, ()>(b.node.expr, do_inner); - let mut pps: [pre_and_post] = []; - for b.node.stmts.each {|s| pps += [stmt_pp(fcx.ccx, *s)]; } + let mut pps: [pre_and_post]/~ = []/~; + for b.node.stmts.each {|s| pps += [stmt_pp(fcx.ccx, *s)]/~; } alt b.node.expr { none {/* no-op */ } - some(e) { pps += [expr_pp(fcx.ccx, e)]; } + some(e) { pps += [expr_pp(fcx.ccx, e)]/~; } } let block_precond = seq_preconds(fcx, pps); - let mut postconds = []; - for pps.each {|pp| postconds += [get_post(pp)]; } + let mut postconds = []/~; + for pps.each {|pp| postconds += [get_post(pp)]/~; } /* A block may be empty, so this next line ensures that the postconds vector is non-empty. 
*/ - postconds += [block_precond]; + postconds += [block_precond]/~; let mut block_postcond = empty_poststate(nv); /* conservative approximation */ diff --git a/src/rustc/middle/tstate/states.rs b/src/rustc/middle/tstate/states.rs index 4883422a908..64748a43b99 100644 --- a/src/rustc/middle/tstate/states.rs +++ b/src/rustc/middle/tstate/states.rs @@ -54,7 +54,7 @@ fn handle_move_or_copy(fcx: fn_ctxt, post: poststate, rhs_path: @path, } } -fn seq_states(fcx: fn_ctxt, pres: prestate, bindings: [binding]) -> +fn seq_states(fcx: fn_ctxt, pres: prestate, bindings: [binding]/~) -> {changed: bool, post: poststate} { let mut changed = false; let mut post = tritv_clone(pres); @@ -166,7 +166,7 @@ fn find_pre_post_state_two(fcx: fn_ctxt, pres: prestate, lhs: @expr, } fn find_pre_post_state_call(fcx: fn_ctxt, pres: prestate, a: @expr, - id: node_id, ops: [init_op], bs: [@expr], + id: node_id, ops: [init_op]/~, bs: [@expr]/~, cf: ret_style) -> bool { let mut changed = find_pre_post_state_expr(fcx, pres, a); // FIXME (#2178): This could be a typestate constraint (except we're @@ -183,8 +183,8 @@ fn find_pre_post_state_call(fcx: fn_ctxt, pres: prestate, a: @expr, } fn find_pre_post_state_exprs(fcx: fn_ctxt, pres: prestate, id: node_id, - ops: [init_op], es: [@expr], cf: ret_style) -> - bool { + ops: [init_op]/~, es: [@expr]/~, + cf: ret_style) -> bool { let rs = seq_states(fcx, pres, arg_bindings(ops, es)); let mut changed = rs.changed | set_prestate_ann(fcx.ccx, id, pres); /* if this is a failing call, it sets everything as initialized */ @@ -404,7 +404,7 @@ fn find_pre_post_state_expr(fcx: fn_ctxt, pres: prestate, e: @expr) -> bool { /* conservative approximation: if a loop contains a break or cont, we assume nothing about the poststate */ - /* which is still unsound -- see [Break-unsound] */ + /* which is still unsound -- see [Break-unsound]/~ */ if has_nonlocal_exits(body) { ret changed | set_poststate_ann(fcx.ccx, e.id, pres); } else { @@ -423,7 +423,7 @@ fn find_pre_post_state_expr(fcx: fn_ctxt, pres: prestate, e: @expr) -> bool { /* conservative approximation: if a loop contains a break or cont, we assume nothing about the poststate (so, we set all predicates to "don't know" */ - /* which is still unsound -- see [Break-unsound] */ + /* which is still unsound -- see [Break-unsound]/~ */ if may_break(body) { /* Only do this if there are *breaks* not conts. An infinite loop with conts is still an infinite loop. 
diff --git a/src/rustc/middle/tstate/tritv.rs b/src/rustc/middle/tstate/tritv.rs index 7bcc4840d25..b66cfe00b3f 100644 --- a/src/rustc/middle/tstate/tritv.rs +++ b/src/rustc/middle/tstate/tritv.rs @@ -249,16 +249,16 @@ fn tritv_doesntcare(v: t) -> bool { ret true; } -fn to_vec(v: t) -> [uint] { +fn to_vec(v: t) -> [uint]/~ { let mut i: uint = 0u; - let mut rslt: [uint] = []; + let mut rslt: [uint]/~ = []/~; while i < v.nbits { rslt += [alt tritv_get(v, i) { dont_care { 2u } ttrue { 1u } tfalse { 0u } - }]; + }]/~; i += 1u; } ret rslt; diff --git a/src/rustc/middle/ty.rs b/src/rustc/middle/ty.rs index c5aa42186bc..d766f8b2960 100644 --- a/src/rustc/middle/ty.rs +++ b/src/rustc/middle/ty.rs @@ -172,15 +172,15 @@ type arg = {mode: ast::mode, ty: t}; type field = {ident: ast::ident, mt: mt}; -type param_bounds = @[param_bound]; +type param_bounds = @[param_bound]/~; type method = {ident: ast::ident, - tps: @[param_bounds], + tps: @[param_bounds]/~, fty: fn_ty, purity: ast::purity, vis: ast::visibility}; -type constr_table = hashmap; +type constr_table = hashmap; type mt = {ty: t, mutbl: ast::mutability}; @@ -234,7 +234,7 @@ type ctxt = // of this node. This only applies to nodes that refer to entities // parameterized by type parameters, such as generic fns, types, or // other items. - node_type_substs: hashmap, + node_type_substs: hashmap, items: ast_map::map, intrinsic_ifaces: hashmap, @@ -246,8 +246,8 @@ type ctxt = needs_unwind_cleanup_cache: hashmap, kind_cache: hashmap, ast_ty_to_ty_cache: hashmap<@ast::ty, ast_ty_to_ty_cache_entry>, - enum_var_cache: hashmap, - iface_method_cache: hashmap, + enum_var_cache: hashmap, + iface_method_cache: hashmap, ty_param_bounds: hashmap, inferred_modes: hashmap, // maps the id of borrowed expr to scope of borrowed ptr @@ -305,10 +305,10 @@ enum closure_kind { type fn_ty = {purity: ast::purity, proto: ast::proto, - inputs: [arg], + inputs: [arg]/~, output: t, ret_style: ret_style, - constraints: [@constr]}; + constraints: [@constr]/~}; // See discussion at head of region.rs enum region { @@ -340,7 +340,7 @@ type opt_region = option; type substs = { self_r: opt_region, self_ty: option, - tps: [t] + tps: [t]/~ }; // NB: If you change this, you'll probably want to change the corresponding @@ -361,18 +361,18 @@ enum sty { ty_evec(mt, vstore), ty_ptr(mt), ty_rptr(region, mt), - ty_rec([field]), + ty_rec([field]/~), ty_fn(fn_ty), ty_iface(def_id, substs), ty_class(def_id, substs), - ty_tup([t]), + ty_tup([t]/~), ty_var(tv_vid), // type variable during typechecking ty_var_integral(tvi_vid), // type variable during typechecking, for // integral types only ty_param(uint, def_id), // type parameter ty_self, // special, implicit `self` type parameter - ty_constr(t, [@type_constr]), + ty_constr(t, [@type_constr]/~), // "Fake" types, used for trans purposes ty_type, // type_desc* @@ -471,7 +471,7 @@ fn param_bounds_to_kind(bounds: param_bounds) -> kind { kind } -type ty_param_bounds_and_ty = {bounds: @[param_bounds], +type ty_param_bounds_and_ty = {bounds: @[param_bounds]/~, rp: ast::region_param, ty: t}; @@ -691,13 +691,13 @@ fn mk_mut_unboxed_vec(cx: ctxt, ty: t) -> t { } -fn mk_rec(cx: ctxt, fs: [field]) -> t { mk_t(cx, ty_rec(fs)) } +fn mk_rec(cx: ctxt, fs: [field]/~) -> t { mk_t(cx, ty_rec(fs)) } -fn mk_constr(cx: ctxt, t: t, cs: [@type_constr]) -> t { +fn mk_constr(cx: ctxt, t: t, cs: [@type_constr]/~) -> t { mk_t(cx, ty_constr(t, cs)) } -fn mk_tup(cx: ctxt, ts: [t]) -> t { mk_t(cx, ty_tup(ts)) } +fn mk_tup(cx: ctxt, ts: [t]/~) -> t { mk_t(cx, ty_tup(ts)) } fn 
mk_fn(cx: ctxt, fty: fn_ty) -> t { mk_t(cx, ty_fn(fty)) } @@ -992,7 +992,7 @@ fn fold_region(cx: ctxt, t0: t, fldop: fn(region, bool) -> region) -> t { } // Substitute *only* type parameters. Used in trans where regions are erased. -fn subst_tps(cx: ctxt, tps: [t], typ: t) -> t { +fn subst_tps(cx: ctxt, tps: [t]/~, typ: t) -> t { if tps.len() == 0u { ret typ; } let tb = ty::get(typ); if !tbox_has_flag(tb, has_params) { ret typ; } @@ -1598,7 +1598,7 @@ fn type_kind(cx: ctxt, ty: t) -> kind { // True if instantiating an instance of `ty` requires an instance of `r_ty`. fn is_instantiable(cx: ctxt, r_ty: t) -> bool { - fn type_requires(cx: ctxt, seen: @mut [def_id], + fn type_requires(cx: ctxt, seen: @mut [def_id]/~, r_ty: t, ty: t) -> bool { #debug["type_requires(%s, %s)?", ty_to_str(cx, r_ty), @@ -1616,7 +1616,7 @@ fn is_instantiable(cx: ctxt, r_ty: t) -> bool { ret r; } - fn subtypes_require(cx: ctxt, seen: @mut [def_id], + fn subtypes_require(cx: ctxt, seen: @mut [def_id]/~, r_ty: t, ty: t) -> bool { #debug["subtypes_require(%s, %s)?", ty_to_str(cx, r_ty), @@ -1713,7 +1713,7 @@ fn is_instantiable(cx: ctxt, r_ty: t) -> bool { ret r; } - let seen = @mut []; + let seen = @mut []/~; !subtypes_require(cx, seen, r_ty, r_ty) } @@ -1973,7 +1973,7 @@ fn hash_type_structure(st: sty) -> uint { (h << 2u) + (did.node as uint) } fn hash_subty(id: uint, subty: t) -> uint { (id << 2u) + type_id(subty) } - fn hash_subtys(id: uint, subtys: [t]) -> uint { + fn hash_subtys(id: uint, subtys: [t]/~) -> uint { let mut h = id; for subtys.each {|s| h = (h << 2u) + type_id(s) } h @@ -2094,8 +2094,8 @@ fn arg_eq(eq: fn(T, T) -> bool, } fn args_eq(eq: fn(T, T) -> bool, - a: [@sp_constr_arg], - b: [@sp_constr_arg]) -> bool { + a: [@sp_constr_arg]/~, + b: [@sp_constr_arg]/~) -> bool { let mut i: uint = 0u; for a.each {|arg| if !arg_eq(eq, arg, b[i]) { ret false; } @@ -2110,7 +2110,7 @@ fn constr_eq(c: @constr, d: @constr) -> bool { args_eq(eq_int, c.node.args, d.node.args); } -fn constrs_eq(cs: [@constr], ds: [@constr]) -> bool { +fn constrs_eq(cs: [@constr]/~, ds: [@constr]/~) -> bool { if vec::len(cs) != vec::len(ds) { ret false; } let mut i = 0u; for cs.each {|c| if !constr_eq(c, ds[i]) { ret false; } i += 1u; } @@ -2125,9 +2125,9 @@ fn node_id_to_type(cx: ctxt, id: ast::node_id) -> t { } } -fn node_id_to_type_params(cx: ctxt, id: ast::node_id) -> [t] { +fn node_id_to_type_params(cx: ctxt, id: ast::node_id) -> [t]/~ { alt cx.node_type_substs.find(id) { - none { ret []; } + none { ret []/~; } some(ts) { ret ts; } } } @@ -2137,7 +2137,7 @@ fn node_id_has_type_params(cx: ctxt, id: ast::node_id) -> bool { } // Type accessors for substructures of types -fn ty_fn_args(fty: t) -> [arg] { +fn ty_fn_args(fty: t) -> [arg]/~ { alt get(fty).struct { ty_fn(f) { f.inputs } _ { fail "ty_fn_args() called on non-fn type"; } @@ -2173,8 +2173,8 @@ fn is_fn_ty(fty: t) -> bool { } // Returns a vec of all the input and output types of fty. 
-fn tys_in_fn_ty(fty: fn_ty) -> [t] { - fty.inputs.map({|a| a.ty}) + [fty.output] +fn tys_in_fn_ty(fty: fn_ty) -> [t]/~ { + fty.inputs.map({|a| a.ty}) + [fty.output]/~ } // Just checks whether it's a fn that returns bool, @@ -2222,7 +2222,8 @@ fn expr_ty(cx: ctxt, expr: @ast::expr) -> t { ret node_id_to_type(cx, expr.id); } -fn expr_ty_params_and_ty(cx: ctxt, expr: @ast::expr) -> {params: [t], ty: t} { +fn expr_ty_params_and_ty(cx: ctxt, + expr: @ast::expr) -> {params: [t]/~, ty: t} { ret {params: node_id_to_type_params(cx, expr.id), ty: node_id_to_type(cx, expr.id)}; } @@ -2249,7 +2250,7 @@ fn stmt_node_id(s: @ast::stmt) -> ast::node_id { } } -fn field_idx(id: ast::ident, fields: [field]) -> option { +fn field_idx(id: ast::ident, fields: [field]/~) -> option { let mut i = 0u; for fields.each {|f| if f.ident == id { ret some(i); } i += 1u; } ret none; @@ -2261,13 +2262,13 @@ fn get_field(rec_ty: t, id: ast::ident) -> field { } } -fn get_fields(rec_ty:t) -> [field] { +fn get_fields(rec_ty:t) -> [field]/~ { alt check get(rec_ty).struct { ty_rec(fields) { fields } } } -fn method_idx(id: ast::ident, meths: [method]) -> option { +fn method_idx(id: ast::ident, meths: [method]/~) -> option { let mut i = 0u; for meths.each {|m| if m.ident == id { ret some(i); } i += 1u; } ret none; @@ -2277,10 +2278,10 @@ fn occurs_check(tcx: ctxt, sp: span, vid: tv_vid, rt: t) { // Returns a vec of all the type variables occurring in `ty`. It may // contain duplicates. (Integral type vars aren't counted.) - fn vars_in_type(ty: t) -> [tv_vid] { - let mut rslt = []; + fn vars_in_type(ty: t) -> [tv_vid]/~ { + let mut rslt = []/~; walk_ty(ty) {|ty| - alt get(ty).struct { ty_var(v) { rslt += [v]; } _ { } } + alt get(ty).struct { ty_var(v) { rslt += [v]/~; } _ { } } } rslt } @@ -2501,11 +2502,11 @@ fn def_has_ty_params(def: ast::def) -> bool { } } -fn store_iface_methods(cx: ctxt, id: ast::node_id, ms: @[method]) { +fn store_iface_methods(cx: ctxt, id: ast::node_id, ms: @[method]/~) { cx.iface_method_cache.insert(ast_util::local_def(id), ms); } -fn iface_methods(cx: ctxt, id: ast::def_id) -> @[method] { +fn iface_methods(cx: ctxt, id: ast::def_id) -> @[method]/~ { alt cx.iface_method_cache.find(id) { some(ms) { ret ms; } _ {} @@ -2553,12 +2554,12 @@ fn ty_to_def_id(ty: t) -> option { } // Enum information -type variant_info = @{args: [t], ctor_ty: t, name: ast::ident, +type variant_info = @{args: [t]/~, ctor_ty: t, name: ast::ident, id: ast::def_id, disr_val: int}; fn substd_enum_variants(cx: ctxt, id: ast::def_id, - substs: substs) -> [variant_info] { + substs: substs) -> [variant_info]/~ { vec::map(*enum_variants(cx, id)) { |variant_info| let substd_args = vec::map(variant_info.args) {|aty| subst(cx, substs, aty) @@ -2609,26 +2610,26 @@ fn item_path(cx: ctxt, id: ast::def_id) -> ast_map::path { ast_map::path_name(item.ident) } }; - *path + [item_elt] + *path + [item_elt]/~ } ast_map::node_native_item(nitem, _, path) { - *path + [ast_map::path_name(nitem.ident)] + *path + [ast_map::path_name(nitem.ident)]/~ } ast_map::node_method(method, _, path) { - *path + [ast_map::path_name(method.ident)] + *path + [ast_map::path_name(method.ident)]/~ } ast_map::node_variant(variant, _, path) { - vec::init(*path) + [ast_map::path_name(variant.node.name)] + vec::init(*path) + [ast_map::path_name(variant.node.name)]/~ } ast_map::node_ctor(nm, _, _, _, path) { - *path + [ast_map::path_name(nm)] + *path + [ast_map::path_name(nm)]/~ } ast_map::node_dtor(_, _, _, path) { - *path + [ast_map::path_name(@"dtor")] + *path + 
[ast_map::path_name(@"dtor")]/~ } @@ -2645,7 +2646,7 @@ fn enum_is_univariant(cx: ctxt, id: ast::def_id) -> bool { vec::len(*enum_variants(cx, id)) == 1u } -fn enum_variants(cx: ctxt, id: ast::def_id) -> @[variant_info] { +fn enum_variants(cx: ctxt, id: ast::def_id) -> @[variant_info]/~ { alt cx.enum_var_cache.find(id) { some(variants) { ret variants; } _ { /* fallthrough */ } @@ -2667,7 +2668,7 @@ fn enum_variants(cx: ctxt, id: ast::def_id) -> @[variant_info] { let arg_tys = { if vec::len(variant.node.args) > 0u { ty_fn_args(ctor_ty).map { |a| a.ty } - } else { [] } + } else { []/~ } }; alt variant.node.disr_expr { some (ex) { @@ -2751,7 +2752,7 @@ fn lookup_field_type(tcx: ctxt, class_id: def_id, id: def_id, // Look up the list of field names and IDs for a given class // Fails if the id is not bound to a class. -fn lookup_class_fields(cx: ctxt, did: ast::def_id) -> [field_ty] { +fn lookup_class_fields(cx: ctxt, did: ast::def_id) -> [field_ty]/~ { if did.crate == ast::local_crate { alt cx.items.find(did.node) { some(ast_map::node_item(i,_)) { @@ -2782,7 +2783,7 @@ fn lookup_class_field(cx: ctxt, parent: ast::def_id, field_id: ast::def_id) } } -fn lookup_public_fields(cx: ctxt, did: ast::def_id) -> [field_ty] { +fn lookup_public_fields(cx: ctxt, did: ast::def_id) -> [field_ty]/~ { vec::filter(lookup_class_fields(cx, did), is_public) } @@ -2796,7 +2797,7 @@ pure fn is_public(f: field_ty) -> bool { // Look up the list of method names and IDs for a given class // Fails if the id is not bound to a class. fn lookup_class_method_ids(cx: ctxt, did: ast::def_id) - : is_local(did) -> [{name: ident, id: node_id, vis: visibility}] { + : is_local(did) -> [{name: ident, id: node_id, vis: visibility}]/~ { alt cx.items.find(did.node) { some(ast_map::node_item(@{node: item_class(_,_,items,_,_,_), _}, _)) { let (_,ms) = split_class_items(items); @@ -2831,13 +2832,13 @@ fn lookup_class_method_by_name(cx:ctxt, did: ast::def_id, name: ident, } } -fn class_field_tys(items: [@class_member]) -> [field_ty] { - let mut rslt = []; +fn class_field_tys(items: [@class_member]/~) -> [field_ty]/~ { + let mut rslt = []/~; for items.each {|it| alt it.node { instance_var(nm, _, cm, id, vis) { rslt += [{ident: nm, id: ast_util::local_def(id), - vis: vis, mutability: cm}]; + vis: vis, mutability: cm}]/~; } class_method(_) { } } @@ -2852,14 +2853,14 @@ fn class_field_tys(items: [@class_member]) -> [field_ty] { // mutable, regardless of how they were declared. It's meant to // be used in trans. fn class_items_as_mutable_fields(cx:ctxt, did: ast::def_id, - substs: substs) -> [field] { + substs: substs) -> [field]/~ { class_item_fields(cx, did, substs, {|_mt| m_mutbl}) } // Same as class_items_as_mutable_fields, but doesn't change // mutability. 
fn class_items_as_fields(cx:ctxt, did: ast::def_id, - substs: substs) -> [field] { + substs: substs) -> [field]/~ { class_item_fields(cx, did, substs, {|mt| alt mt { class_mutable { m_mutbl } class_immutable { m_imm }}}) @@ -2868,14 +2869,14 @@ fn class_items_as_fields(cx:ctxt, did: ast::def_id, fn class_item_fields(cx:ctxt, did: ast::def_id, substs: substs, frob_mutability: fn(class_mutability) -> mutability) - -> [field] { - let mut rslt = []; + -> [field]/~ { + let mut rslt = []/~; for lookup_class_fields(cx, did).each {|f| // consider all instance vars mut, because the // constructor may mutate all vars rslt += [{ident: f.ident, mt: {ty: lookup_field_type(cx, did, f.id, substs), - mutbl: frob_mutability(f.mutability)}}]; + mutbl: frob_mutability(f.mutability)}}]/~; } rslt } @@ -2942,14 +2943,14 @@ fn is_binopable(_cx: ctxt, ty: t, op: ast::binop) -> bool { /*. add, shift, bit . sub, rel, logic . mult, eq, */ - /*other*/ [f, f, f, f, t, t, f, f], - /*bool*/ [f, f, f, f, t, t, t, t], - /*int*/ [t, t, t, t, t, t, t, f], - /*float*/ [t, t, t, f, t, t, f, f], - /*str*/ [t, f, f, f, t, t, f, f], - /*vec*/ [t, f, f, f, t, t, f, f], - /*bot*/ [f, f, f, f, t, t, f, f], - /*struct*/ [t, t, t, t, t, t, t, t]]; + /*other*/ [f, f, f, f, t, t, f, f]/~, + /*bool*/ [f, f, f, f, t, t, t, t]/~, + /*int*/ [t, t, t, t, t, t, t, f]/~, + /*float*/ [t, t, t, f, t, t, f, f]/~, + /*str*/ [t, f, f, f, t, t, f, f]/~, + /*vec*/ [t, f, f, f, t, t, f, f]/~, + /*bot*/ [f, f, f, f, t, t, f, f]/~, + /*struct*/ [t, t, t, t, t, t, t, t]/~]/~; ret tbl[tycat(ty)][opcat(op)]; } @@ -2972,7 +2973,7 @@ fn ast_constr_to_constr(tcx: ctxt, c: @ast::constr_general) -> } } -fn ty_params_to_tys(tcx: ty::ctxt, tps: [ast::ty_param]) -> [t] { +fn ty_params_to_tys(tcx: ty::ctxt, tps: [ast::ty_param]/~) -> [t]/~ { vec::from_fn(tps.len(), {|i| ty::mk_param(tcx, i, ast_util::local_def(tps[i].id)) }) diff --git a/src/rustc/middle/typeck.rs b/src/rustc/middle/typeck.rs index ad1a613e739..95f4e6c0c95 100644 --- a/src/rustc/middle/typeck.rs +++ b/src/rustc/middle/typeck.rs @@ -120,7 +120,7 @@ type method_map_entry = { type method_map = hashmap; // Resolutions for bounds of all parameters, left to right, for a given path. -type vtable_res = @[vtable_origin]; +type vtable_res = @[vtable_origin]/~; enum vtable_origin { /* @@ -128,7 +128,7 @@ enum vtable_origin { from whence comes the vtable, and tys are the type substs. vtable_res is the vtable itself */ - vtable_static(ast::def_id, [ty::t], vtable_res), + vtable_static(ast::def_id, [ty::t]/~, vtable_res), /* Dynamic vtable, comes from a parameter that has a bound on it: fn foo(a: T) -- a's vtable would have a @@ -142,7 +142,7 @@ enum vtable_origin { Dynamic vtable, comes from something known to have an interface type. 
def_id refers to the iface item, tys are the substs */ - vtable_iface(ast::def_id, [ty::t]), + vtable_iface(ast::def_id, [ty::t]/~), } type vtable_map = hashmap; @@ -163,7 +163,7 @@ fn write_ty_to_tcx(tcx: ty::ctxt, node_id: ast::node_id, ty: ty::t) { } fn write_substs_to_tcx(tcx: ty::ctxt, node_id: ast::node_id, - +substs: [ty::t]) { + +substs: [ty::t]/~) { if substs.len() > 0u { tcx.node_type_substs.insert(node_id, substs); } @@ -183,7 +183,7 @@ fn lookup_def_ccx(ccx: @crate_ctxt, sp: span, id: ast::node_id) -> ast::def { } fn no_params(t: ty::t) -> ty::ty_param_bounds_and_ty { - {bounds: @[], rp: ast::rp_none, ty: t} + {bounds: @[]/~, rp: ast::rp_none, ty: t} } fn require_same_types( @@ -259,7 +259,8 @@ fn check_main_fn_ty(ccx: @crate_ctxt, if !ok { tcx.sess.span_err(main_span, #fmt("Wrong type in main function: found `%s`, \ - expecting `native fn([str]) -> ()` or `native fn() -> ()`", + expecting `native fn([str]/~) -> ()` \ + or `native fn() -> ()`", ty_to_str(tcx, main_t))); } } diff --git a/src/rustc/middle/typeck/astconv.rs b/src/rustc/middle/typeck/astconv.rs index c40b86d0530..101f3f6e44f 100644 --- a/src/rustc/middle/typeck/astconv.rs +++ b/src/rustc/middle/typeck/astconv.rs @@ -337,9 +337,9 @@ fn ast_ty_to_ty( "implied fixed length for bound"); } ast::ty_constr(t, cs) { - let mut out_cs = []; + let mut out_cs = []/~; for cs.each {|constr| - out_cs += [ty::ast_constr_to_constr(tcx, constr)]; + out_cs += [ty::ast_constr_to_constr(tcx, constr)]/~; } ty::mk_constr(tcx, ast_ty_to_ty(self, rscope, t), out_cs) } @@ -402,7 +402,7 @@ fn ty_of_arg( {mode: mode, ty: ty} } -type expected_tys = option<{inputs: [ty::arg], +type expected_tys = option<{inputs: [ty::arg]/~, output: ty::t}>; fn ty_of_fn_decl( diff --git a/src/rustc/middle/typeck/check.rs b/src/rustc/middle/typeck/check.rs index 5ba7c88d3cd..3ddbc505a28 100644 --- a/src/rustc/middle/typeck/check.rs +++ b/src/rustc/middle/typeck/check.rs @@ -89,7 +89,7 @@ type fn_ctxt = infcx: infer::infer_ctxt, locals: hashmap, - mut blocks: [ast::node_id], // stack of blocks in scope, may be empty + mut blocks: [ast::node_id]/~, // stack of blocks in scope, may be empty in_scope_regions: isr_alist, node_types: smallintmap::smallintmap, @@ -107,7 +107,7 @@ fn blank_fn_ctxt(ccx: @crate_ctxt, rty: ty::t) -> @fn_ctxt { purity: ast::pure_fn, infcx: infer::new_infer_ctxt(ccx.tcx), locals: int_hash(), - mut blocks: [], + mut blocks: []/~, in_scope_regions: @nil, node_types: smallintmap::mk(), node_type_substs: map::int_hash(), @@ -217,7 +217,7 @@ fn check_fn(ccx: @crate_ctxt, purity: purity, infcx: infcx, locals: locals, - mut blocks: [], + mut blocks: []/~, in_scope_regions: isr, node_types: node_types, node_type_substs: node_type_substs, @@ -257,7 +257,7 @@ fn check_fn(ccx: @crate_ctxt, fn gather_locals(fcx: @fn_ctxt, decl: ast::fn_decl, body: ast::blk, - arg_tys: [ty::t]) { + arg_tys: [ty::t]/~) { let tcx = fcx.ccx.tcx; let assign = fn@(nid: ast::node_id, ty_opt: option) { @@ -565,7 +565,7 @@ impl methods for @fn_ctxt { fn do_autoderef(fcx: @fn_ctxt, sp: span, t: ty::t) -> ty::t { let mut t1 = t; - let mut enum_dids = []; + let mut enum_dids = []/~; loop { let sty = structure_of(fcx, sp, t1); @@ -639,9 +639,10 @@ fn check_expr(fcx: @fn_ctxt, expr: @ast::expr, }; } -// determine the `self` type, using fresh variables for all variables declared -// on the impl declaration e.g., `impl for [(A,B)]` would return ($0, $1) -// where $0 and $1 are freshly instantiated type variables. 
+// determine the `self` type, using fresh variables for all variables +// declared on the impl declaration e.g., `impl for [(A,B)]/~` +// would return ($0, $1) where $0 and $1 are freshly instantiated type +// variables. fn impl_self_ty(fcx: @fn_ctxt, did: ast::def_id) -> ty_param_substs_and_ty { let tcx = fcx.ccx.tcx; @@ -692,7 +693,7 @@ fn impl_self_ty(fcx: @fn_ctxt, did: ast::def_id) -> ty_param_substs_and_ty { // Only for fields! Returns for methods> // Indifferent to privacy flags fn lookup_field_ty(tcx: ty::ctxt, class_id: ast::def_id, - items:[ty::field_ty], fieldname: ast::ident, + items:[ty::field_ty]/~, fieldname: ast::ident, substs: ty::substs) -> option { let o_field = vec::find(items, {|f| f.ident == fieldname}); @@ -713,7 +714,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, // expressions. fn check_call_or_bind( fcx: @fn_ctxt, sp: span, call_expr_id: ast::node_id, in_fty: ty::t, - args: [option<@ast::expr>]) -> {fty: ty::t, bot: bool} { + args: [option<@ast::expr>]/~) -> {fty: ty::t, bot: bool} { let mut bot = false; @@ -780,7 +781,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, // functions. This is so that we have more information about the types // of arguments when we typecheck the functions. This isn't really the // right way to do this. - for [false, true].each { |check_blocks| + for [false, true]/~.each { |check_blocks| for args.eachi {|i, a_opt| alt a_opt { some(a) { @@ -816,7 +817,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, // A generic function for doing all of the checking for call expressions fn check_call(fcx: @fn_ctxt, sp: span, call_expr_id: ast::node_id, - f: @ast::expr, args: [@ast::expr]) -> bool { + f: @ast::expr, args: [@ast::expr]/~) -> bool { let mut bot = check_expr(fcx, f, none); let fn_ty = fcx.expr_ty(f); @@ -889,11 +890,11 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, } fn lookup_op_method(fcx: @fn_ctxt, op_ex: @ast::expr, self_ex: @ast::expr, self_t: ty::t, - opname: str, args: [option<@ast::expr>]) + opname: str, args: [option<@ast::expr>]/~) -> option<(ty::t, bool)> { let callee_id = ast_util::op_expr_callee_id(op_ex); let lkup = method::lookup(fcx, op_ex, self_ex, op_ex.id, - callee_id, @opname, self_t, [], false); + callee_id, @opname, self_t, []/~, false); alt lkup.method() { some(origin) { let {fty: method_ty, bot: bot} = { @@ -964,7 +965,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, some(name) { alt lookup_op_method(fcx, ex, lhs_expr, lhs_resolved_t, - name, [some(rhs)]) { + name, [some(rhs)]/~) { some(pair) { ret pair; } _ {} } @@ -982,7 +983,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, fn check_user_unop(fcx: @fn_ctxt, op_str: str, mname: str, ex: @ast::expr, rhs_expr: @ast::expr, rhs_t: ty::t) -> ty::t { - alt lookup_op_method(fcx, ex, rhs_expr, rhs_t, mname, []) { + alt lookup_op_method(fcx, ex, rhs_expr, rhs_t, mname, []/~) { some((ret_ty, _)) { ret_ty } _ { fcx.ccx.tcx.sess.span_err( @@ -1415,7 +1416,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, fcx.write_ty(id, typ); } ast::expr_tup(elts) { - let mut elt_ts = []; + let mut elt_ts = []/~; vec::reserve(elt_ts, vec::len(elts)); let flds = unpack_expected(fcx, expected) {|sty| alt sty { ty::ty_tup(flds) { some(flds) } _ { none } } @@ -1423,7 +1424,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, for elts.eachi {|i, e| check_expr(fcx, e, flds.map {|fs| fs[i]}); let ety = fcx.expr_ty(e); - elt_ts += [ety]; + elt_ts += [ety]/~; } let typ = ty::mk_tup(tcx, elt_ts); fcx.write_ty(id, typ); @@ -1574,7 +1575,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, let resolved = 
structurally_resolved_type(fcx, expr.span, raw_base_t); alt lookup_op_method(fcx, expr, base, resolved, "[]", - [some(idx)]) { + [some(idx)]/~) { some((ret_ty, _)) { fcx.write_ty(id, ret_ty); } _ { tcx.sess.span_fatal( @@ -1592,7 +1593,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, let p_ty = fcx.expr_ty(p); let lkup = method::lookup(fcx, p, p, expr.id, alloc_id, - @"alloc", p_ty, [], false); + @"alloc", p_ty, []/~, false); alt lkup.method() { some(entry) { fcx.ccx.method_map.insert(alloc_id, entry); @@ -1607,10 +1608,10 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, ty::mk_fn(tcx, {purity: ast::impure_fn, proto: ast::proto_any, inputs: [{mode: m, ty: ty_uint}, - {mode: m, ty: ty_uint}], + {mode: m, ty: ty_uint}]/~, output: ty_nilp, ret_style: ast::return_val, - constraints: []}) + constraints: []/~}) }; demand::suptype(fcx, expr.span, @@ -1791,13 +1792,13 @@ fn check_instantiable(tcx: ty::ctxt, fn check_enum_variants(ccx: @crate_ctxt, sp: span, - vs: [ast::variant], + vs: [ast::variant]/~, id: ast::node_id) { let rty = ty::node_id_to_type(ccx.tcx, id); let fcx = blank_fn_ctxt(ccx, rty); - let mut disr_vals: [int] = []; + let mut disr_vals: [int]/~ = []/~; let mut disr_val = 0; - let mut variants = []; + let mut variants = []/~; for vs.each {|v| alt v.node.disr_expr { some(e) { @@ -1825,14 +1826,14 @@ fn check_enum_variants(ccx: @crate_ctxt, ccx.tcx.sess.span_err(v.span, "discriminator value already exists"); } - disr_vals += [disr_val]; + disr_vals += [disr_val]/~; let ctor_ty = ty::node_id_to_type(ccx.tcx, v.node.id); let arg_tys = if v.node.args.len() > 0u { ty::ty_fn_args(ctor_ty).map {|a| a.ty } - } else { [] }; + } else { []/~ }; variants += [@{args: arg_tys, ctor_ty: ctor_ty, name: v.node.name, id: local_def(v.node.id), - disr_val: disr_val}]; + disr_val: disr_val}]/~; disr_val += 1; } @@ -1906,10 +1907,11 @@ fn check_pred_expr(fcx: @fn_ctxt, e: @ast::expr) -> bool { ret bot; } -fn check_constraints(fcx: @fn_ctxt, cs: [@ast::constr], args: [ast::arg]) { +fn check_constraints(fcx: @fn_ctxt, cs: [@ast::constr]/~, + args: [ast::arg]/~) { let num_args = vec::len(args); for cs.each {|c| - let mut c_args = []; + let mut c_args = []/~; for c.node.args.each {|a| c_args += [ // "base" should not occur in a fn type thing, as of @@ -1935,8 +1937,8 @@ fn check_constraints(fcx: @fn_ctxt, cs: [@ast::constr], args: [ast::arg]) { ast::carg_ident(i) { if i < num_args { let p = @{span: a.span, global: false, - idents: [args[i].ident], - rp: none, types: []}; + idents: [args[i].ident]/~, + rp: none, types: []/~}; let arg_occ_node_id = fcx.ccx.tcx.sess.next_node_id(); fcx.ccx.tcx.def_map.insert @@ -1951,7 +1953,7 @@ fn check_constraints(fcx: @fn_ctxt, cs: [@ast::constr], args: [ast::arg]) { carg_ident index out of bounds"); } } - }]; + }]/~; } let p_op: ast::expr_ = ast::expr_path(c.node.path); let oper: @ast::expr = @{id: c.node.id, node: p_op, span: c.span}; @@ -2017,7 +2019,7 @@ fn ty_param_bounds_and_ty_for_def(fcx: @fn_ctxt, sp: span, defn: ast::def) -> ast::def_fn(id, ast::crust_fn) { // Crust functions are just u8 pointers ret { - bounds: @[], + bounds: @[]/~, rp: ast::rp_none, ty: ty::mk_ptr( fcx.ccx.tcx, @@ -2167,7 +2169,7 @@ fn ast_expr_vstore_to_vstore(fcx: @fn_ctxt, e: @ast::expr, n: uint, fn check_bounds_are_used(ccx: @crate_ctxt, span: span, - tps: [ast::ty_param], + tps: [ast::ty_param]/~, rp: ast::region_param, ty: ty::t) { let mut r_used = alt rp { @@ -2218,26 +2220,26 @@ fn check_intrinsic_type(ccx: @crate_ctxt, it: @ast::native_item) { let tcx = ccx.tcx; let (n_tps, inputs, output) = 
alt *it.ident { "size_of" | - "pref_align_of" | "min_align_of" { (1u, [], ty::mk_uint(ccx.tcx)) } - "get_tydesc" { (1u, [], ty::mk_nil_ptr(tcx)) } - "init" { (1u, [], param(ccx, 0u)) } - "forget" { (1u, [arg(ast::by_move, param(ccx, 0u))], + "pref_align_of" | "min_align_of" { (1u, []/~, ty::mk_uint(ccx.tcx)) } + "get_tydesc" { (1u, []/~, ty::mk_nil_ptr(tcx)) } + "init" { (1u, []/~, param(ccx, 0u)) } + "forget" { (1u, [arg(ast::by_move, param(ccx, 0u))]/~, ty::mk_nil(tcx)) } - "reinterpret_cast" { (2u, [arg(ast::by_ref, param(ccx, 0u))], + "reinterpret_cast" { (2u, [arg(ast::by_ref, param(ccx, 0u))]/~, param(ccx, 1u)) } - "addr_of" { (1u, [arg(ast::by_ref, param(ccx, 0u))], + "addr_of" { (1u, [arg(ast::by_ref, param(ccx, 0u))]/~, ty::mk_imm_ptr(tcx, param(ccx, 0u))) } "move_val" | "move_val_init" { (1u, [arg(ast::by_mutbl_ref, param(ccx, 0u)), - arg(ast::by_move, param(ccx, 0u))], + arg(ast::by_move, param(ccx, 0u))]/~, ty::mk_nil(tcx)) } - "needs_drop" { (1u, [], ty::mk_bool(tcx)) } + "needs_drop" { (1u, []/~, ty::mk_bool(tcx)) } "visit_ty" { assert ccx.tcx.intrinsic_ifaces.contains_key(@"ty_visitor"); let (_, visitor_iface) = ccx.tcx.intrinsic_ifaces.get(@"ty_visitor"); - (1u, [arg(ast::by_ref, visitor_iface)], ty::mk_nil(tcx)) + (1u, [arg(ast::by_ref, visitor_iface)]/~, ty::mk_nil(tcx)) } "frame_address" { let fty = ty::mk_fn(ccx.tcx, { @@ -2248,12 +2250,12 @@ fn check_intrinsic_type(ccx: @crate_ctxt, it: @ast::native_item) { ty: ty::mk_imm_ptr( ccx.tcx, ty::mk_mach_uint(ccx.tcx, ast::ty_u8)) - }], + }]/~, output: ty::mk_nil(ccx.tcx), ret_style: ast::return_val, - constraints: [] + constraints: []/~ }); - (0u, [arg(ast::by_ref, fty)], ty::mk_nil(tcx)) + (0u, [arg(ast::by_ref, fty)]/~, ty::mk_nil(tcx)) } other { tcx.sess.span_err(it.span, "unrecognized intrinsic function: `" + @@ -2265,7 +2267,7 @@ fn check_intrinsic_type(ccx: @crate_ctxt, it: @ast::native_item) { proto: ast::proto_bare, inputs: inputs, output: output, ret_style: ast::return_val, - constraints: []}); + constraints: []/~}); let i_ty = ty::lookup_item_type(ccx.tcx, local_def(it.id)); let i_n_tps = (*i_ty.bounds).len(); if i_n_tps != n_tps { diff --git a/src/rustc/middle/typeck/check/alt.rs b/src/rustc/middle/typeck/check/alt.rs index 7a1eca4fc25..9aa4d37fd77 100644 --- a/src/rustc/middle/typeck/check/alt.rs +++ b/src/rustc/middle/typeck/check/alt.rs @@ -4,7 +4,7 @@ import middle::typeck::infer::methods; // next_ty_var, fn check_alt(fcx: @fn_ctxt, expr: @ast::expr, discrim: @ast::expr, - arms: [ast::arm]) -> bool { + arms: [ast::arm]/~) -> bool { let tcx = fcx.ccx.tcx; let mut bot; @@ -52,7 +52,7 @@ type pat_ctxt = { }; fn check_pat_variant(pcx: pat_ctxt, pat: @ast::pat, path: @ast::path, - subpats: option<[@ast::pat]>, expected: ty::t) { + subpats: option<[@ast::pat]/~>, expected: ty::t) { // Typecheck the path. 
let fcx = pcx.fcx; @@ -170,7 +170,7 @@ fn check_pat(pcx: pat_ctxt, pat: @ast::pat, expected: ty::t) { } } ast::pat_ident(path, c) { - check_pat_variant(pcx, pat, path, some([]), expected); + check_pat_variant(pcx, pat, path, some([]/~), expected); } ast::pat_enum(path, subpats) { check_pat_variant(pcx, pat, path, subpats, expected); diff --git a/src/rustc/middle/typeck/check/method.rs b/src/rustc/middle/typeck/check/method.rs index 1c2d1234118..b54be2fadf1 100644 --- a/src/rustc/middle/typeck/check/method.rs +++ b/src/rustc/middle/typeck/check/method.rs @@ -23,7 +23,7 @@ class lookup { let mut self_ty: ty::t; let mut derefs: uint; let candidates: dvec; - let supplied_tps: [ty::t]; + let supplied_tps: [ty::t]/~; let include_private: bool; new(fcx: @fn_ctxt, @@ -33,7 +33,7 @@ class lookup { node_id: ast::node_id, //node id where to store type of fn m_name: ast::ident, //b in a.b(...) self_ty: ty::t, //type of a in a.b(...) - supplied_tps: [ty::t], //Xs in a.b::(...) + supplied_tps: [ty::t]/~, //Xs in a.b::(...) include_private: bool) { self.fcx = fcx; diff --git a/src/rustc/middle/typeck/check/regionmanip.rs b/src/rustc/middle/typeck/check/regionmanip.rs index 5d9572e029d..7eebe1a31ca 100644 --- a/src/rustc/middle/typeck/check/regionmanip.rs +++ b/src/rustc/middle/typeck/check/regionmanip.rs @@ -12,7 +12,7 @@ fn replace_bound_regions_in_fn_ty( fn_ty: ty::fn_ty} { let mut all_tys = ty::tys_in_fn_ty(fn_ty); - for self_ty.each { |t| all_tys += [t] } + for self_ty.each { |t| all_tys += [t]/~ } #debug["replace_bound_regions_in_fn_ty(self_ty=%?, fn_ty=%s, all_tys=%?)", self_ty.map { |t| ty_to_str(tcx, t) }, @@ -50,7 +50,7 @@ fn replace_bound_regions_in_fn_ty( fn create_bound_region_mapping( tcx: ty::ctxt, isr: isr_alist, - tys: [ty::t], + tys: [ty::t]/~, to_r: fn(ty::bound_region) -> ty::region) -> isr_alist { // Takes `isr` (described above), `to_r` (described above), diff --git a/src/rustc/middle/typeck/check/vtable.rs b/src/rustc/middle/typeck/check/vtable.rs index d772f6861de..e86773bf76a 100644 --- a/src/rustc/middle/typeck/check/vtable.rs +++ b/src/rustc/middle/typeck/check/vtable.rs @@ -1,6 +1,6 @@ import check::{fn_ctxt, impl_self_ty, methods}; -fn has_iface_bounds(tps: [ty::param_bounds]) -> bool { +fn has_iface_bounds(tps: [ty::param_bounds]/~) -> bool { vec::any(tps, {|bs| vec::any(*bs, {|b| alt b { ty::bound_iface(_) { true } _ { false } } @@ -9,17 +9,17 @@ fn has_iface_bounds(tps: [ty::param_bounds]) -> bool { } fn lookup_vtables(fcx: @fn_ctxt, isc: resolve::iscopes, sp: span, - bounds: @[ty::param_bounds], substs: ty::substs, + bounds: @[ty::param_bounds]/~, substs: ty::substs, allow_unsafe: bool) -> vtable_res { let tcx = fcx.ccx.tcx; - let mut result = [], i = 0u; + let mut result = []/~, i = 0u; for substs.tps.each {|ty| for vec::each(*bounds[i]) {|bound| alt bound { ty::bound_iface(i_ty) { let i_ty = ty::subst(tcx, substs, i_ty); result += [lookup_vtable(fcx, isc, sp, ty, i_ty, - allow_unsafe)]; + allow_unsafe)]/~; } _ {} } @@ -105,7 +105,7 @@ fn lookup_vtable(fcx: @fn_ctxt, isc: resolve::iscopes, sp: span, } _ { - let mut found = []; + let mut found = []/~; for list::each(isc) {|impls| /* For each impl in scope... 
*/ @@ -144,7 +144,7 @@ fn lookup_vtable(fcx: @fn_ctxt, isc: resolve::iscopes, sp: span, iface_tps, im.did); let subres = lookup_vtables(fcx, isc, sp, im_bs, substs_f, false); - found += [vtable_static(im.did, substs_f.tps, subres)]; + found += [vtable_static(im.did, substs_f.tps, subres)]/~; } alt found.len() { @@ -180,8 +180,8 @@ fn fixup_ty(fcx: @fn_ctxt, sp: span, ty: ty::t) -> ty::t { } } -fn connect_iface_tps(fcx: @fn_ctxt, sp: span, impl_tys: [ty::t], - iface_tys: [ty::t], impl_did: ast::def_id) { +fn connect_iface_tps(fcx: @fn_ctxt, sp: span, impl_tys: [ty::t]/~, + iface_tys: [ty::t]/~, impl_did: ast::def_id) { let tcx = fcx.ccx.tcx; let ity = option::get(ty::impl_iface(tcx, impl_did)); let iface_ty = ty::subst_tps(tcx, impl_tys, ity); @@ -251,7 +251,7 @@ fn resolve_expr(ex: @ast::expr, &&fcx: @fn_ctxt, v: visit::vt<@fn_ctxt>) { Map this expression to that vtable (that is: "ex has vtable ") */ - cx.vtable_map.insert(ex.id, @[vtable]); + cx.vtable_map.insert(ex.id, @[vtable]/~); } _ {} } diff --git a/src/rustc/middle/typeck/check/writeback.rs b/src/rustc/middle/typeck/check/writeback.rs index 9e4f3e58ae5..b0059fab251 100644 --- a/src/rustc/middle/typeck/check/writeback.rs +++ b/src/rustc/middle/typeck/check/writeback.rs @@ -40,10 +40,10 @@ fn resolve_type_vars_for_node(wbcx: wb_ctxt, sp: span, id: ast::node_id) write_ty_to_tcx(tcx, id, t); alt fcx.opt_node_ty_substs(id) { some(substs) { - let mut new_tps = []; + let mut new_tps = []/~; for substs.tps.each {|subst| alt resolve_type_vars_in_type(fcx, sp, subst) { - some(t) { new_tps += [t]; } + some(t) { new_tps += [t]/~; } none { wbcx.success = false; ret none; } } } diff --git a/src/rustc/middle/typeck/collect.rs b/src/rustc/middle/typeck/collect.rs index ea04fd96b27..fdf455e398a 100644 --- a/src/rustc/middle/typeck/collect.rs +++ b/src/rustc/middle/typeck/collect.rs @@ -37,7 +37,7 @@ fn collect_item_types(ccx: @crate_ctxt, crate: @ast::crate) { ast::item_iface(_, _, _) { let def_id = { crate: ast::local_crate, node: intrinsic_item.id }; - let substs = {self_r: none, self_ty: none, tps: []}; + let substs = {self_r: none, self_ty: none, tps: []/~}; let ty = ty::mk_iface(ccx.tcx, def_id, substs); ccx.tcx.intrinsic_ifaces.insert (intrinsic_item.ident, (def_id, ty)); @@ -96,8 +96,8 @@ impl of ast_conv for @crate_ctxt { fn get_enum_variant_types(ccx: @crate_ctxt, enum_ty: ty::t, - variants: [ast::variant], - ty_params: [ast::ty_param], + variants: [ast::variant]/~, + ty_params: [ast::ty_param]/~, rp: ast::region_param) { let tcx = ccx.tcx; @@ -118,7 +118,7 @@ fn get_enum_variant_types(ccx: @crate_ctxt, inputs: args, output: enum_ty, ret_style: ast::return_val, - constraints: []}) + constraints: []/~}) }; let tpt = {bounds: ty_param_bounds(ccx, ty_params), rp: rp, @@ -130,7 +130,7 @@ fn get_enum_variant_types(ccx: @crate_ctxt, fn ensure_iface_methods(ccx: @crate_ctxt, id: ast::node_id) { fn store_methods(ccx: @crate_ctxt, id: ast::node_id, - stuff: [T], f: fn@(T) -> ty::method) { + stuff: [T]/~, f: fn@(T) -> ty::method) { ty::store_iface_methods(ccx.tcx, id, @vec::map(stuff, f)); } @@ -224,11 +224,11 @@ fn compare_impl_method(tcx: ty::ctxt, sp: span, } fn check_methods_against_iface(ccx: @crate_ctxt, - tps: [ast::ty_param], + tps: [ast::ty_param]/~, rp: ast::region_param, selfty: ty::t, a_ifacety: @ast::iface_ref, - ms: [converted_method]) { + ms: [converted_method]/~) { let tcx = ccx.tcx; let (did, tpt) = instantiate_iface_ref(ccx, a_ifacety, rp); @@ -259,7 +259,7 @@ fn check_methods_against_iface(ccx: @crate_ctxt, fn 
convert_class_item(ccx: @crate_ctxt, rp: ast::region_param, - bounds: @[ty::param_bounds], + bounds: @[ty::param_bounds]/~, v: ast_util::ivar) { let tt = ccx.to_ty(type_rscope(rp), v.ty); write_ty_to_tcx(ccx.tcx, v.id, tt); @@ -270,10 +270,10 @@ fn convert_class_item(ccx: @crate_ctxt, type converted_method = {mty: ty::method, id: ast::node_id, span: span}; fn convert_methods(ccx: @crate_ctxt, - ms: [@ast::method], + ms: [@ast::method]/~, rp: ast::region_param, - rcvr_bounds: @[ty::param_bounds], - self_ty: ty::t) -> [converted_method] { + rcvr_bounds: @[ty::param_bounds]/~, + self_ty: ty::t) -> [converted_method]/~ { let tcx = ccx.tcx; vec::map(ms) { |m| @@ -344,7 +344,7 @@ fn convert(ccx: @crate_ctxt, it: @ast::item) { inputs: t_args, output: t_res, ret_style: ast::return_val, - constraints: []}); // tjc TODO + constraints: []/~}); // tjc TODO write_ty_to_tcx(tcx, ctor.node.id, t_ctor); tcx.tcache.insert(local_def(ctor.node.id), {bounds: tpt.bounds, @@ -549,26 +549,26 @@ fn ty_of_native_item(ccx: @crate_ctxt, it: @ast::native_item) } } fn ty_param_bounds(ccx: @crate_ctxt, - params: [ast::ty_param]) -> @[ty::param_bounds] { + params: [ast::ty_param]/~) -> @[ty::param_bounds]/~ { fn compute_bounds(ccx: @crate_ctxt, param: ast::ty_param) -> ty::param_bounds { @vec::flat_map(*param.bounds) { |b| alt b { - ast::bound_send { [ty::bound_send] } - ast::bound_copy { [ty::bound_copy] } - ast::bound_const { [ty::bound_const] } + ast::bound_send { [ty::bound_send]/~ } + ast::bound_copy { [ty::bound_copy]/~ } + ast::bound_const { [ty::bound_const]/~ } ast::bound_iface(t) { let ity = ast_ty_to_ty(ccx, empty_rscope, t); alt ty::get(ity).struct { ty::ty_iface(*) { - [ty::bound_iface(ity)] + [ty::bound_iface(ity)]/~ } _ { ccx.tcx.sess.span_err( t.span, "type parameter bounds must be \ interface types"); - [] + []/~ } } } @@ -590,7 +590,7 @@ fn ty_param_bounds(ccx: @crate_ctxt, fn ty_of_native_fn_decl(ccx: @crate_ctxt, decl: ast::fn_decl, - ty_params: [ast::ty_param], + ty_params: [ast::ty_param]/~, def_id: ast::def_id) -> ty::ty_param_bounds_and_ty { let bounds = ty_param_bounds(ccx, ty_params); @@ -603,14 +603,14 @@ fn ty_of_native_fn_decl(ccx: @crate_ctxt, inputs: input_tys, output: output_ty, ret_style: ast::return_val, - constraints: []}); + constraints: []/~}); let tpt = {bounds: bounds, rp: ast::rp_none, ty: t_fn}; ccx.tcx.tcache.insert(def_id, tpt); ret tpt; } -fn mk_ty_params(ccx: @crate_ctxt, atps: [ast::ty_param]) - -> {bounds: @[ty::param_bounds], params: [ty::t]} { +fn mk_ty_params(ccx: @crate_ctxt, atps: [ast::ty_param]/~) + -> {bounds: @[ty::param_bounds]/~, params: [ty::t]/~} { let mut i = 0u; let bounds = ty_param_bounds(ccx, atps); @@ -622,8 +622,8 @@ fn mk_ty_params(ccx: @crate_ctxt, atps: [ast::ty_param]) })} } -fn mk_substs(ccx: @crate_ctxt, atps: [ast::ty_param], rp: ast::region_param) - -> {bounds: @[ty::param_bounds], substs: ty::substs} { +fn mk_substs(ccx: @crate_ctxt, atps: [ast::ty_param]/~, rp: ast::region_param) + -> {bounds: @[ty::param_bounds]/~, substs: ty::substs} { let {bounds, params} = mk_ty_params(ccx, atps); let self_r = alt rp { diff --git a/src/rustc/middle/typeck/infer.rs b/src/rustc/middle/typeck/infer.rs index 1225908abb8..4652ddc2747 100644 --- a/src/rustc/middle/typeck/infer.rs +++ b/src/rustc/middle/typeck/infer.rs @@ -293,7 +293,7 @@ enum var_value { type vals_and_bindings = { vals: smallintmap>, - mut bindings: [(V, var_value)] + mut bindings: [(V, var_value)]/~ }; enum node = { @@ -346,9 +346,9 @@ type fres = result::result; fn new_infer_ctxt(tcx: ty::ctxt) 
-> infer_ctxt { infer_ctxt(@{tcx: tcx, - tvb: {vals: smallintmap::mk(), mut bindings: []}, - tvib: {vals: smallintmap::mk(), mut bindings: []}, - rb: {vals: smallintmap::mk(), mut bindings: []}, + tvb: {vals: smallintmap::mk(), mut bindings: []/~}, + tvib: {vals: smallintmap::mk(), mut bindings: []/~}, + rb: {vals: smallintmap::mk(), mut bindings: []/~}, ty_var_counter: @mut 0u, ty_var_integral_counter: @mut 0u, region_var_counter: @mut 0u})} @@ -556,8 +556,8 @@ impl transaction_methods for infer_ctxt { // TODO---could use a vec::clear() that ran destructors but kept // the vec at its currently allocated length - self.tvb.bindings = []; - self.rb.bindings = []; + self.tvb.bindings = []/~; + self.rb.bindings = []/~; ret r; } @@ -604,7 +604,7 @@ impl methods for infer_ctxt { ty::mk_var(self.tcx, self.next_ty_var_id()) } - fn next_ty_vars(n: uint) -> [ty::t] { + fn next_ty_vars(n: uint) -> [ty::t]/~ { vec::from_fn(n) {|_i| self.next_ty_var() } } @@ -1041,7 +1041,7 @@ impl unify_methods for infer_ctxt { } fn constrvecs( - as: [@ty::type_constr], bs: [@ty::type_constr]) -> ures { + as: [@ty::type_constr]/~, bs: [@ty::type_constr]/~) -> ures { if check vec::same_length(as, bs) { iter_vec2(as, bs) {|a,b| @@ -1082,8 +1082,8 @@ impl unify_methods for infer_ctxt { // resolution. The first is a shallow resolution: this only resolves // one layer, but does not resolve any nested variables. So, for // example, if we have two variables A and B, and the constraint that -// A <: [B] and B <: int, then shallow resolution on A would yield -// [B]. Deep resolution, on the other hand, would yield [int]. +// A <: [B]/~ and B <: int, then shallow resolution on A would yield +// [B]/~. Deep resolution, on the other hand, would yield [int]/~. // // But there is one more knob: the `force_level` variable controls // the behavior in the face of unconstrained type and region @@ -1107,8 +1107,8 @@ type resolve_state = @{ deep: bool, force_vars: force_level, mut err: option, - mut r_seen: [region_vid], - mut v_seen: [tv_vid] + mut r_seen: [region_vid]/~, + mut v_seen: [tv_vid]/~ }; fn resolver(infcx: infer_ctxt, deep: bool, fvars: force_level) @@ -1117,8 +1117,8 @@ fn resolver(infcx: infer_ctxt, deep: bool, fvars: force_level) deep: deep, force_vars: fvars, mut err: none, - mut r_seen: [], - mut v_seen: []} + mut r_seen: []/~, + mut v_seen: []/~} } impl methods for resolve_state { @@ -1308,14 +1308,14 @@ impl methods for resolve_state { // // Assuming we have a bound from both sides, we will then examine // these bounds and see if they have the form (@M_a T_a, &rb.M_b T_b) -// (resp. ~M_a T_a, [M_a T_a], etc). If they do not, we fall back to +// (resp. ~M_a T_a, [M_a T_a]/~, etc). If they do not, we fall back to // subtyping. // // If they *do*, then we know that the two types could never be // subtypes of one another. We will then construct a type @const T_b // and ensure that type a is a subtype of that. This allows for the -// possibility of assigning from a type like (say) @[mut T1] to a type -// &[T2] where T1 <: T2. This might seem surprising, since the `@` +// possibility of assigning from a type like (say) @[mut T1]/~ to a type +// &[T2]/~ where T1 <: T2. This might seem surprising, since the `@` // points at mutable memory but the `&` points at immutable memory. // This would in fact be unsound, except for the borrowck, which comes // later and guarantees that such mutability conversions are safe. 
@@ -1533,7 +1533,7 @@ iface combine { fn mts(a: ty::mt, b: ty::mt) -> cres; fn contratys(a: ty::t, b: ty::t) -> cres; fn tys(a: ty::t, b: ty::t) -> cres; - fn tps(as: [ty::t], bs: [ty::t]) -> cres<[ty::t]>; + fn tps(as: [ty::t]/~, bs: [ty::t]/~) -> cres<[ty::t]/~>; fn self_tys(a: option, b: option) -> cres>; fn substs(as: ty::substs, bs: ty::substs) -> cres; fn fns(a: ty::fn_ty, b: ty::fn_ty) -> cres; @@ -1592,7 +1592,7 @@ fn super_substs( } fn super_tps( - self: C, as: [ty::t], bs: [ty::t]) -> cres<[ty::t]> { + self: C, as: [ty::t]/~, bs: [ty::t]/~) -> cres<[ty::t]/~> { // Note: type parameters are always treated as *invariant* // (otherwise the type system would be unsound). In the @@ -1692,8 +1692,8 @@ fn super_vstores( fn super_fns( self: C, a_f: ty::fn_ty, b_f: ty::fn_ty) -> cres { - fn argvecs( - self: C, a_args: [ty::arg], b_args: [ty::arg]) -> cres<[ty::arg]> { + fn argvecs(self: C, a_args: [ty::arg]/~, + b_args: [ty::arg]/~) -> cres<[ty::arg]/~> { if check vec::same_length(a_args, b_args) { map_vec2(a_args, b_args) {|a, b| self.args(a, b) } @@ -2061,7 +2061,7 @@ impl of combine for sub { super_substs(self, as, bs) } - fn tps(as: [ty::t], bs: [ty::t]) -> cres<[ty::t]> { + fn tps(as: [ty::t]/~, bs: [ty::t]/~) -> cres<[ty::t]/~> { super_tps(self, as, bs) } @@ -2247,7 +2247,7 @@ impl of combine for lub { super_substs(self, as, bs) } - fn tps(as: [ty::t], bs: [ty::t]) -> cres<[ty::t]> { + fn tps(as: [ty::t]/~, bs: [ty::t]/~) -> cres<[ty::t]/~> { super_tps(self, as, bs) } @@ -2446,7 +2446,7 @@ impl of combine for glb { super_substs(self, as, bs) } - fn tps(as: [ty::t], bs: [ty::t]) -> cres<[ty::t]> { + fn tps(as: [ty::t]/~, bs: [ty::t]/~) -> cres<[ty::t]/~> { super_tps(self, as, bs) } diff --git a/src/rustc/util/common.rs b/src/rustc/util/common.rs index 381b5ce6a82..74c57007880 100644 --- a/src/rustc/util/common.rs +++ b/src/rustc/util/common.rs @@ -29,9 +29,9 @@ type flag = hashmap; fn field_expr(f: ast::field) -> @ast::expr { ret f.node.expr; } -fn field_exprs(fields: [ast::field]) -> [@ast::expr] { - let mut es = []; - for fields.each {|f| es += [f.node.expr]; } +fn field_exprs(fields: [ast::field]/~) -> [@ast::expr]/~ { + let mut es = []/~; + for fields.each {|f| es += [f.node.expr]/~; } ret es; } diff --git a/src/rustc/util/ppaux.rs b/src/rustc/util/ppaux.rs index f44d43d35ed..5d2098a2e5f 100644 --- a/src/rustc/util/ppaux.rs +++ b/src/rustc/util/ppaux.rs @@ -96,7 +96,7 @@ fn vstore_to_str(cx: ctxt, vs: ty::vstore) -> str { } } -fn tys_to_str(cx: ctxt, ts: [t]) -> str { +fn tys_to_str(cx: ctxt, ts: [t]/~) -> str { let mut rs = ""; for ts.each {|t| rs += ty_to_str(cx, t); } rs @@ -121,8 +121,8 @@ fn ty_to_str(cx: ctxt, typ: t) -> str { } fn fn_to_str(cx: ctxt, purity: ast::purity, proto: ast::proto, ident: option, - inputs: [arg], output: t, cf: ast::ret_style, - constrs: [@constr]) -> str { + inputs: [arg]/~, output: t, cf: ast::ret_style, + constrs: [@constr]/~) -> str { let mut s; s = alt purity { @@ -132,8 +132,8 @@ fn ty_to_str(cx: ctxt, typ: t) -> str { s += proto_to_str(proto); alt ident { some(i) { s += " "; s += *i; } _ { } } s += "("; - let mut strs = []; - for inputs.each {|a| strs += [fn_input_to_str(cx, a)]; } + let mut strs = []/~; + for inputs.each {|a| strs += [fn_input_to_str(cx, a)]/~; } s += str::connect(strs, ", "); s += ")"; if ty::get(output).struct != ty_nil { @@ -189,13 +189,13 @@ fn ty_to_str(cx: ctxt, typ: t) -> str { ty_unboxed_vec(tm) { "unboxed_vec<" + mt_to_str(cx, tm) + ">" } ty_type { "type" } ty_rec(elems) { - let mut strs: [str] = []; - for 
elems.each {|fld| strs += [field_to_str(cx, fld)]; } + let mut strs: [str]/~ = []/~; + for elems.each {|fld| strs += [field_to_str(cx, fld)]/~; } "{" + str::connect(strs, ",") + "}" } ty_tup(elems) { - let mut strs = []; - for elems.each {|elem| strs += [ty_to_str(cx, elem)]; } + let mut strs = []/~; + for elems.each {|elem| strs += [ty_to_str(cx, elem)]/~; } "(" + str::connect(strs, ",") + ")" } ty_fn(f) { @@ -205,7 +205,7 @@ fn ty_to_str(cx: ctxt, typ: t) -> str { ty_var(v) { v.to_str() } ty_var_integral(v) { v.to_str() } ty_param(id, _) { - "'" + str::from_bytes([('a' as u8) + (id as u8)]) + "'" + str::from_bytes([('a' as u8) + (id as u8)]/~) } ty_self { "self" } ty_enum(did, substs) | ty_class(did, substs) { @@ -234,7 +234,7 @@ fn ty_to_str(cx: ctxt, typ: t) -> str { fn parameterized(cx: ctxt, base: str, self_r: option, - tps: [ty::t]) -> str { + tps: [ty::t]/~) -> str { let r_str = alt self_r { none { "" } @@ -262,7 +262,7 @@ fn constr_to_str(c: @constr) -> str { pprust::constr_args_to_str(pprust::uint_to_str, c.node.args); } -fn constrs_str(constrs: [@constr]) -> str { +fn constrs_str(constrs: [@constr]/~) -> str { let mut s = ""; let mut colon = true; for constrs.each {|c| diff --git a/src/rustdoc/attr_parser.rs b/src/rustdoc/attr_parser.rs index 4f4ce761e12..44c2b8da3e6 100644 --- a/src/rustdoc/attr_parser.rs +++ b/src/rustdoc/attr_parser.rs @@ -20,7 +20,7 @@ type crate_attrs = { #[cfg(test)] mod test { - fn parse_attributes(source: str) -> [ast::attribute] { + fn parse_attributes(source: str) -> [ast::attribute]/~ { import syntax::parse; import parse::parser; import parse::attr::parser_attr; @@ -29,14 +29,14 @@ mod test { let parse_sess = syntax::parse::new_parse_sess(none); let parser = parse::new_parser_from_source_str( - parse_sess, [], "-", codemap::fss_none, @source); + parse_sess, []/~, "-", codemap::fss_none, @source); parser.parse_outer_attributes() } } fn doc_meta( - attrs: [ast::attribute] + attrs: [ast::attribute]/~ ) -> option<@ast::meta_item> { #[doc = @@ -55,7 +55,7 @@ fn doc_meta( } } -fn parse_crate(attrs: [ast::attribute]) -> crate_attrs { +fn parse_crate(attrs: [ast::attribute]/~) -> crate_attrs { let link_metas = attr::find_linkage_metas(attrs); { @@ -88,7 +88,7 @@ fn should_not_extract_crate_name_if_no_name_value_in_link_attribute() { assert attrs.name == none; } -fn parse_desc(attrs: [ast::attribute]) -> option { +fn parse_desc(attrs: [ast::attribute]/~) -> option { alt doc_meta(attrs) { some(meta) { attr::get_meta_item_value_str(meta).map({|x|*x}) @@ -113,7 +113,7 @@ fn parse_desc_should_parse_simple_doc_attributes() { assert attrs == some("basic"); } -fn parse_hidden(attrs: [ast::attribute]) -> bool { +fn parse_hidden(attrs: [ast::attribute]/~) -> bool { alt doc_meta(attrs) { some(meta) { alt attr::get_meta_item_list(meta) { diff --git a/src/rustdoc/attr_pass.rs b/src/rustdoc/attr_pass.rs index 983576e5816..a88f3d7b07f 100644 --- a/src/rustdoc/attr_pass.rs +++ b/src/rustdoc/attr_pass.rs @@ -90,7 +90,7 @@ fn fold_item( fn parse_item_attrs( srv: astsrv::srv, id: doc::ast_id, - +parse_attrs: fn~([ast::attribute]) -> T) -> T { + +parse_attrs: fn~([ast::attribute]/~) -> T) -> T { astsrv::exec(srv) {|ctxt| let attrs = alt ctxt.ast_map.get(id) { ast_map::node_item(item, _) { item.attrs } @@ -197,11 +197,11 @@ fn fold_iface( fn merge_method_attrs( srv: astsrv::srv, item_id: doc::ast_id, - docs: [doc::methoddoc] -) -> [doc::methoddoc] { + docs: [doc::methoddoc]/~ +) -> [doc::methoddoc]/~ { // Create an assoc list from method name to attributes - let attrs: [(str, 
option)] = astsrv::exec(srv) {|ctxt| + let attrs: [(str, option)]/~ = astsrv::exec(srv) {|ctxt| alt ctxt.ast_map.get(item_id) { ast_map::node_item(@{ node: ast::item_iface(_, _, methods), _ diff --git a/src/rustdoc/config.rs b/src/rustdoc/config.rs index fc8bbf09a65..7a5c3cf67ca 100644 --- a/src/rustdoc/config.rs +++ b/src/rustdoc/config.rs @@ -39,7 +39,7 @@ fn opt_output_style() -> str { "output-style" } fn opt_pandoc_cmd() -> str { "pandoc-cmd" } fn opt_help() -> str { "h" } -fn opts() -> [(getopts::opt, str)] { +fn opts() -> [(getopts::opt, str)]/~ { [ (getopts::optopt(opt_output_dir()), "--output-dir put documents here"), @@ -51,13 +51,13 @@ fn opts() -> [(getopts::opt, str)] { "--pandoc-cmd the command for running pandoc"), (getopts::optflag(opt_help()), "-h print help") - ] + ]/~ } fn usage() { import io::println; - println("Usage: rustdoc [options] \n"); + println("Usage: rustdoc [options]/~ \n"); println("Options:\n"); for opts().each {|opt| println(#fmt(" %s", tuple::second(opt))); @@ -75,9 +75,9 @@ fn default_config(input_crate: str) -> config { } } -type program_output = fn~(str, [str]) -> {status: int, out: str, err: str}; +type program_output = fn~(str, [str]/~) -> {status: int, out: str, err: str}; -fn mock_program_output(_prog: str, _args: [str]) -> { +fn mock_program_output(_prog: str, _args: [str]/~) -> { status: int, out: str, err: str } { { @@ -87,12 +87,12 @@ fn mock_program_output(_prog: str, _args: [str]) -> { } } -fn parse_config(args: [str]) -> result { +fn parse_config(args: [str]/~) -> result { parse_config_(args, run::program_output) } fn parse_config_( - args: [str], + args: [str]/~, program_output: program_output ) -> result { let args = vec::tail(args); @@ -194,19 +194,19 @@ fn maybe_find_pandoc( } let possible_pandocs = alt maybe_pandoc_cmd { - some(pandoc_cmd) { [pandoc_cmd] } + some(pandoc_cmd) { [pandoc_cmd]/~ } none { - ["pandoc"] + alt os::homedir() { + ["pandoc"]/~ + alt os::homedir() { some(dir) { - [path::connect(dir, ".cabal/bin/pandoc")] + [path::connect(dir, ".cabal/bin/pandoc")]/~ } - none { [] } + none { []/~ } } } }; let pandoc = vec::find(possible_pandocs) {|pandoc| - let output = program_output(pandoc, ["--version"]); + let output = program_output(pandoc, ["--version"]/~); #debug("testing pandoc cmd %s: %?", pandoc, output); output.status == 0 }; @@ -224,7 +224,7 @@ fn should_find_pandoc() { output_format: pandoc_html with default_config("test") }; - let mock_program_output = fn~(_prog: str, _args: [str]) -> { + let mock_program_output = fn~(_prog: str, _args: [str]/~) -> { status: int, out: str, err: str } { { @@ -241,7 +241,7 @@ fn should_error_with_no_pandoc() { output_format: pandoc_html with default_config("test") }; - let mock_program_output = fn~(_prog: str, _args: [str]) -> { + let mock_program_output = fn~(_prog: str, _args: [str]/~) -> { status: int, out: str, err: str } { { @@ -254,26 +254,27 @@ fn should_error_with_no_pandoc() { #[cfg(test)] mod test { - fn parse_config(args: [str]) -> result { + fn parse_config(args: [str]/~) -> result { parse_config_(args, mock_program_output) } } #[test] fn should_error_with_no_crates() { - let config = test::parse_config(["rustdoc"]); + let config = test::parse_config(["rustdoc"]/~); assert result::get_err(config) == "no crates specified"; } #[test] fn should_error_with_multiple_crates() { - let config = test::parse_config(["rustdoc", "crate1.rc", "crate2.rc"]); + let config = + test::parse_config(["rustdoc", "crate1.rc", "crate2.rc"]/~); assert result::get_err(config) == "multiple crates 
specified"; } #[test] fn should_set_output_dir_to_cwd_if_not_provided() { - let config = test::parse_config(["rustdoc", "crate.rc"]); + let config = test::parse_config(["rustdoc", "crate.rc"]/~); assert result::get(config).output_dir == "."; } @@ -281,13 +282,13 @@ fn should_set_output_dir_to_cwd_if_not_provided() { fn should_set_output_dir_if_provided() { let config = test::parse_config([ "rustdoc", "crate.rc", "--output-dir", "snuggles" - ]); + ]/~); assert result::get(config).output_dir == "snuggles"; } #[test] fn should_set_output_format_to_pandoc_html_if_not_provided() { - let config = test::parse_config(["rustdoc", "crate.rc"]); + let config = test::parse_config(["rustdoc", "crate.rc"]/~); assert result::get(config).output_format == pandoc_html; } @@ -295,7 +296,7 @@ fn should_set_output_format_to_pandoc_html_if_not_provided() { fn should_set_output_format_to_markdown_if_requested() { let config = test::parse_config([ "rustdoc", "crate.rc", "--output-format", "markdown" - ]); + ]/~); assert result::get(config).output_format == markdown; } @@ -303,7 +304,7 @@ fn should_set_output_format_to_markdown_if_requested() { fn should_set_output_format_to_pandoc_html_if_requested() { let config = test::parse_config([ "rustdoc", "crate.rc", "--output-format", "html" - ]); + ]/~); assert result::get(config).output_format == pandoc_html; } @@ -311,13 +312,13 @@ fn should_set_output_format_to_pandoc_html_if_requested() { fn should_error_on_bogus_format() { let config = test::parse_config([ "rustdoc", "crate.rc", "--output-format", "bogus" - ]); + ]/~); assert result::get_err(config) == "unknown output format 'bogus'"; } #[test] fn should_set_output_style_to_doc_per_mod_by_default() { - let config = test::parse_config(["rustdoc", "crate.rc"]); + let config = test::parse_config(["rustdoc", "crate.rc"]/~); assert result::get(config).output_style == doc_per_mod; } @@ -325,7 +326,7 @@ fn should_set_output_style_to_doc_per_mod_by_default() { fn should_set_output_style_to_one_doc_if_requested() { let config = test::parse_config([ "rustdoc", "crate.rc", "--output-style", "doc-per-crate" - ]); + ]/~); assert result::get(config).output_style == doc_per_crate; } @@ -333,7 +334,7 @@ fn should_set_output_style_to_one_doc_if_requested() { fn should_set_output_style_to_doc_per_mod_if_requested() { let config = test::parse_config([ "rustdoc", "crate.rc", "--output-style", "doc-per-mod" - ]); + ]/~); assert result::get(config).output_style == doc_per_mod; } @@ -341,7 +342,7 @@ fn should_set_output_style_to_doc_per_mod_if_requested() { fn should_error_on_bogus_output_style() { let config = test::parse_config([ "rustdoc", "crate.rc", "--output-style", "bogus" - ]); + ]/~); assert result::get_err(config) == "unknown output style 'bogus'"; } @@ -349,12 +350,12 @@ fn should_error_on_bogus_output_style() { fn should_set_pandoc_command_if_requested() { let config = test::parse_config([ "rustdoc", "crate.rc", "--pandoc-cmd", "panda-bear-doc" - ]); + ]/~); assert result::get(config).pandoc_cmd == some("panda-bear-doc"); } #[test] fn should_set_pandoc_command_when_using_pandoc() { - let config = test::parse_config(["rustdoc", "crate.rc"]); + let config = test::parse_config(["rustdoc", "crate.rc"]/~); assert result::get(config).pandoc_cmd == some("pandoc"); -} \ No newline at end of file +} diff --git a/src/rustdoc/demo.rs b/src/rustdoc/demo.rs index 9c700a23720..b40995a0c82 100644 --- a/src/rustdoc/demo.rs +++ b/src/rustdoc/demo.rs @@ -25,12 +25,12 @@ enum omnomnomy { #[doc = "Delicious sugar cookies"] cookie, #[doc = "It's 
pizza"] - pizza_pie([uint]) + pizza_pie([uint]/~) } fn take_my_order_please( _waitress: waitress, - _order: [omnomnomy] + _order: [omnomnomy]/~ ) -> uint { #[doc = " diff --git a/src/rustdoc/desc_to_brief_pass.rs b/src/rustdoc/desc_to_brief_pass.rs index 0ad21a77bdb..83fda9f7b5c 100644 --- a/src/rustdoc/desc_to_brief_pass.rs +++ b/src/rustdoc/desc_to_brief_pass.rs @@ -161,11 +161,11 @@ fn first_sentence_(s: str) -> str { } } -fn paragraphs(s: str) -> [str] { +fn paragraphs(s: str) -> [str]/~ { let lines = str::lines_any(s); let mut whitespace_lines = 0; let mut accum = ""; - let paras = vec::foldl([], lines) {|paras, line| + let paras = vec::foldl([]/~, lines) {|paras, line| let mut res = paras; if str::is_whitespace(line) { @@ -173,7 +173,7 @@ fn paragraphs(s: str) -> [str] { } else { if whitespace_lines > 0 { if str::is_not_empty(accum) { - res += [accum]; + res += [accum]/~; accum = ""; } } @@ -191,7 +191,7 @@ fn paragraphs(s: str) -> [str] { }; if str::is_not_empty(accum) { - paras + [accum] + paras + [accum]/~ } else { paras } @@ -200,13 +200,13 @@ fn paragraphs(s: str) -> [str] { #[test] fn test_paragraphs_1() { let paras = paragraphs("1\n\n2"); - assert paras == ["1", "2"]; + assert paras == ["1", "2"]/~; } #[test] fn test_paragraphs_2() { let paras = paragraphs("\n\n1\n1\n\n2\n\n"); - assert paras == ["1\n1", "2"]; + assert paras == ["1\n1", "2"]/~; } #[test] diff --git a/src/rustdoc/doc.rs b/src/rustdoc/doc.rs index 12ee7e6ca70..3665e9cc331 100644 --- a/src/rustdoc/doc.rs +++ b/src/rustdoc/doc.rs @@ -3,7 +3,7 @@ type ast_id = int; type doc = { - pages: [page] + pages: [page]/~ }; enum page { @@ -41,10 +41,10 @@ enum itemtag { type itemdoc = { id: ast_id, name: str, - path: [str], + path: [str]/~, brief: option, desc: option, - sections: [section], + sections: [section]/~, // Indicates that this node is a reexport of a different item reexport: bool }; @@ -56,13 +56,13 @@ type simpleitemdoc = { type moddoc = { item: itemdoc, - items: [itemtag], + items: [itemtag]/~, index: option }; type nmoddoc = { item: itemdoc, - fns: [fndoc], + fns: [fndoc]/~, index: option }; @@ -72,7 +72,7 @@ type fndoc = simpleitemdoc; type enumdoc = { item: itemdoc, - variants: [variantdoc] + variants: [variantdoc]/~ }; type variantdoc = { @@ -83,14 +83,14 @@ type variantdoc = { type ifacedoc = { item: itemdoc, - methods: [methoddoc] + methods: [methoddoc]/~ }; type methoddoc = { name: str, brief: option, desc: option, - sections: [section], + sections: [section]/~, sig: option }; @@ -98,13 +98,13 @@ type impldoc = { item: itemdoc, iface_ty: option, self_ty: option, - methods: [methoddoc] + methods: [methoddoc]/~ }; type tydoc = simpleitemdoc; type index = { - entries: [index_entry] + entries: [index_entry]/~ }; #[doc = " @@ -144,7 +144,7 @@ impl util for doc { #[doc = "Some helper methods on moddoc, mostly for testing"] impl util for moddoc { - fn mods() -> [moddoc] { + fn mods() -> [moddoc]/~ { vec::filter_map(self.items) {|itemtag| alt itemtag { modtag(moddoc) { some(moddoc) } @@ -153,7 +153,7 @@ impl util for moddoc { } } - fn nmods() -> [nmoddoc] { + fn nmods() -> [nmoddoc]/~ { vec::filter_map(self.items) {|itemtag| alt itemtag { nmodtag(nmoddoc) { some(nmoddoc) } @@ -162,7 +162,7 @@ impl util for moddoc { } } - fn fns() -> [fndoc] { + fn fns() -> [fndoc]/~ { vec::filter_map(self.items) {|itemtag| alt itemtag { fntag(fndoc) { some(fndoc) } @@ -171,7 +171,7 @@ impl util for moddoc { } } - fn consts() -> [constdoc] { + fn consts() -> [constdoc]/~ { vec::filter_map(self.items) {|itemtag| alt itemtag { 
consttag(constdoc) { some(constdoc) } @@ -180,7 +180,7 @@ impl util for moddoc { } } - fn enums() -> [enumdoc] { + fn enums() -> [enumdoc]/~ { vec::filter_map(self.items) {|itemtag| alt itemtag { enumtag(enumdoc) { some(enumdoc) } @@ -189,7 +189,7 @@ impl util for moddoc { } } - fn ifaces() -> [ifacedoc] { + fn ifaces() -> [ifacedoc]/~ { vec::filter_map(self.items) {|itemtag| alt itemtag { ifacetag(ifacedoc) { some(ifacedoc) } @@ -198,7 +198,7 @@ impl util for moddoc { } } - fn impls() -> [impldoc] { + fn impls() -> [impldoc]/~ { vec::filter_map(self.items) {|itemtag| alt itemtag { impltag(impldoc) { some(impldoc) } @@ -207,7 +207,7 @@ impl util for moddoc { } } - fn types() -> [tydoc] { + fn types() -> [tydoc]/~ { vec::filter_map(self.items) {|itemtag| alt itemtag { tytag(tydoc) { some(tydoc) } @@ -217,9 +217,9 @@ impl util for moddoc { } } -impl util for [page] { +impl util for [page]/~ { - fn mods() -> [moddoc] { + fn mods() -> [moddoc]/~ { vec::filter_map(self) {|page| alt page { itempage(modtag(moddoc)) { some(moddoc) } @@ -228,7 +228,7 @@ impl util for [page] { } } - fn nmods() -> [nmoddoc] { + fn nmods() -> [nmoddoc]/~ { vec::filter_map(self) {|page| alt page { itempage(nmodtag(nmoddoc)) { some(nmoddoc) } @@ -237,7 +237,7 @@ impl util for [page] { } } - fn fns() -> [fndoc] { + fn fns() -> [fndoc]/~ { vec::filter_map(self) {|page| alt page { itempage(fntag(fndoc)) { some(fndoc) } @@ -246,7 +246,7 @@ impl util for [page] { } } - fn consts() -> [constdoc] { + fn consts() -> [constdoc]/~ { vec::filter_map(self) {|page| alt page { itempage(consttag(constdoc)) { some(constdoc) } @@ -255,7 +255,7 @@ impl util for [page] { } } - fn enums() -> [enumdoc] { + fn enums() -> [enumdoc]/~ { vec::filter_map(self) {|page| alt page { itempage(enumtag(enumdoc)) { some(enumdoc) } @@ -264,7 +264,7 @@ impl util for [page] { } } - fn ifaces() -> [ifacedoc] { + fn ifaces() -> [ifacedoc]/~ { vec::filter_map(self) {|page| alt page { itempage(ifacetag(ifacedoc)) { some(ifacedoc) } @@ -273,7 +273,7 @@ impl util for [page] { } } - fn impls() -> [impldoc] { + fn impls() -> [impldoc]/~ { vec::filter_map(self) {|page| alt page { itempage(impltag(impldoc)) { some(impldoc) } @@ -282,7 +282,7 @@ impl util for [page] { } } - fn types() -> [tydoc] { + fn types() -> [tydoc]/~ { vec::filter_map(self) {|page| alt page { itempage(tytag(tydoc)) { some(tydoc) } @@ -344,7 +344,7 @@ impl util for A { self.item().name } - fn path() -> [str] { + fn path() -> [str]/~ { self.item().path } @@ -356,7 +356,7 @@ impl util for A { self.item().desc } - fn sections() -> [section] { + fn sections() -> [section]/~ { self.item().sections } -} \ No newline at end of file +} diff --git a/src/rustdoc/extract.rs b/src/rustdoc/extract.rs index 99663adf32f..29ac568e2e2 100644 --- a/src/rustdoc/extract.rs +++ b/src/rustdoc/extract.rs @@ -25,7 +25,7 @@ fn extract( doc::cratepage({ topmod: top_moddoc_from_crate(crate, default_name), }) - ] + ]/~ } } @@ -41,10 +41,10 @@ fn mk_itemdoc(id: ast::node_id, name: ast::ident) -> doc::itemdoc { { id: id, name: *name, - path: [], + path: []/~, brief: none, desc: none, - sections: [], + sections: []/~, reexport: false } } @@ -148,7 +148,7 @@ fn should_extract_const_name_and_id() { fn enumdoc_from_enum( itemdoc: doc::itemdoc, - variants: [ast::variant] + variants: [ast::variant]/~ ) -> doc::enumdoc { { item: itemdoc, @@ -157,8 +157,8 @@ fn enumdoc_from_enum( } fn variantdocs_from_variants( - variants: [ast::variant] -) -> [doc::variantdoc] { + variants: [ast::variant]/~ +) -> [doc::variantdoc]/~ { 
par::seqmap(variants, variantdoc_from_variant) } @@ -185,7 +185,7 @@ fn should_extract_enum_variants() { fn ifacedoc_from_iface( itemdoc: doc::itemdoc, - methods: [ast::ty_method] + methods: [ast::ty_method]/~ ) -> doc::ifacedoc { { item: itemdoc, @@ -194,7 +194,7 @@ fn ifacedoc_from_iface( name: *method.ident, brief: none, desc: none, - sections: [], + sections: []/~, sig: none } } @@ -215,7 +215,7 @@ fn should_extract_iface_methods() { fn impldoc_from_impl( itemdoc: doc::itemdoc, - methods: [@ast::method] + methods: [@ast::method]/~ ) -> doc::impldoc { { item: itemdoc, @@ -226,7 +226,7 @@ fn impldoc_from_impl( name: *method.ident, brief: none, desc: none, - sections: [], + sections: []/~, sig: none } } @@ -344,4 +344,4 @@ mod test { assert doc.cratemod().name() == "name"; } } -} \ No newline at end of file +} diff --git a/src/rustdoc/markdown_index_pass.rs b/src/rustdoc/markdown_index_pass.rs index f7b49be1ed5..ef8acd39df6 100644 --- a/src/rustdoc/markdown_index_pass.rs +++ b/src/rustdoc/markdown_index_pass.rs @@ -118,6 +118,7 @@ fn pandoc_header_id(header: str) -> str { let s = str::replace(s, ")", ""); let s = str::replace(s, "@", ""); let s = str::replace(s, "~", ""); + let s = str::replace(s, "/", ""); ret s; } fn replace_with_hyphens(s: str) -> str { @@ -131,7 +132,7 @@ fn pandoc_header_id(header: str) -> str { #[test] fn should_remove_punctuation_from_headers() { assert pandoc_header_id("impl foo of bar") == "impl-foo-of-bara"; - assert pandoc_header_id("fn@([~A])") == "fna"; + assert pandoc_header_id("fn@([~A]/~)") == "fna"; } #[test] diff --git a/src/rustdoc/markdown_pass.rs b/src/rustdoc/markdown_pass.rs index 3e7a9325bd6..18b06092f05 100644 --- a/src/rustdoc/markdown_pass.rs +++ b/src/rustdoc/markdown_pass.rs @@ -211,7 +211,7 @@ fn header_kind(doc: doc::itemtag) -> str { } fn header_name(doc: doc::itemtag) -> str { - let fullpath = str::connect(doc.path() + [doc.name()], "::"); + let fullpath = str::connect(doc.path() + [doc.name()]/~, "::"); alt doc { doc::modtag(_) if doc.id() != syntax::ast::crate_node_id { fullpath @@ -275,7 +275,7 @@ fn should_write_full_path_to_mod() { fn write_common( ctxt: ctxt, desc: option, - sections: [doc::section] + sections: [doc::section]/~ ) { write_desc(ctxt, desc); write_sections(ctxt, sections); @@ -294,7 +294,7 @@ fn write_desc( } } -fn write_sections(ctxt: ctxt, sections: [doc::section]) { +fn write_sections(ctxt: ctxt, sections: [doc::section]/~) { vec::iter(sections) {|section| write_section(ctxt, section); } @@ -381,10 +381,10 @@ fn write_index(ctxt: ctxt, index: doc::index) { let header = header_text_(entry.kind, entry.name); let id = entry.link; if option::is_some(entry.brief) { - ctxt.w.write_line(#fmt("* [%s](%s) - %s", + ctxt.w.write_line(#fmt("* [%s]/~(%s) - %s", header, id, option::get(entry.brief))); } else { - ctxt.w.write_line(#fmt("* [%s](%s)", header, id)); + ctxt.w.write_line(#fmt("* [%s]/~(%s)", header, id)); } } ctxt.w.write_line(""); @@ -395,8 +395,8 @@ fn should_write_index() { let markdown = test::render("mod a { } mod b { }"); assert str::contains( markdown, - "\n\n* [Module `a`](#module-a)\n\ - * [Module `b`](#module-b)\n\n" + "\n\n* [Module `a`]/~(#module-a)\n\ + * [Module `b`]/~(#module-b)\n\n" ); } @@ -417,7 +417,7 @@ fn should_write_index_for_native_mods() { let markdown = test::render("native mod a { fn a(); }"); assert str::contains( markdown, - "\n\n* [Function `a`](#function-a)\n\n" + "\n\n* [Function `a`]/~(#function-a)\n\n" ); } @@ -468,7 +468,7 @@ fn write_fnlike( ctxt: ctxt, sig: option, desc: option, - 
sections: [doc::section] + sections: [doc::section]/~ ) { write_sig(ctxt, sig); write_common(ctxt, desc, sections); @@ -518,12 +518,12 @@ fn should_correctly_indent_fn_signature() { items: [doc::fntag({ sig: some("line 1\nline 2") with doc.cratemod().fns()[0] - })] + })]/~ with doc.cratemod() } with doc.cratedoc() }) - ] + ]/~ }; let markdown = test::write_markdown_str(doc); assert str::contains(markdown, " line 1\n line 2"); @@ -580,7 +580,7 @@ fn should_write_enum_description() { fn write_variants( ctxt: ctxt, - docs: [doc::variantdoc] + docs: [doc::variantdoc]/~ ) { if vec::is_empty(docs) { ret; @@ -644,7 +644,7 @@ fn write_iface(ctxt: ctxt, doc: doc::ifacedoc) { write_methods(ctxt, doc.methods); } -fn write_methods(ctxt: ctxt, docs: [doc::methoddoc]) { +fn write_methods(ctxt: ctxt, docs: [doc::methoddoc]/~) { vec::iter(docs) {|doc| write_method(ctxt, doc) } } diff --git a/src/rustdoc/markdown_writer.rs b/src/rustdoc/markdown_writer.rs index 6d3e02edb89..b1f7ef3a970 100644 --- a/src/rustdoc/markdown_writer.rs +++ b/src/rustdoc/markdown_writer.rs @@ -76,7 +76,7 @@ fn pandoc_writer( "--to=html", "--css=rust.css", "--output=" + filename - ]; + ]/~; generic_writer {|markdown| import io::writer_util; @@ -178,7 +178,7 @@ fn make_filename( } } doc::itempage(doc) { - str::connect(doc.path() + [doc.name()], "_") + str::connect(doc.path() + [doc.name()]/~, "_") } } }; @@ -247,7 +247,7 @@ mod test { fn write_file(path: str, s: str) { import io::writer_util; - alt io::file_writer(path, [io::create, io::truncate]) { + alt io::file_writer(path, [io::create, io::truncate]/~) { result::ok(writer) { writer.write_str(s); } diff --git a/src/rustdoc/page_pass.rs b/src/rustdoc/page_pass.rs index 4855a87e99a..dd00276b4b0 100644 --- a/src/rustdoc/page_pass.rs +++ b/src/rustdoc/page_pass.rs @@ -44,11 +44,11 @@ type page_port = comm::port>; type page_chan = comm::chan>; fn make_doc_from_pages(page_port: page_port) -> doc::doc { - let mut pages = []; + let mut pages = []/~; loop { let val = comm::recv(page_port); if option::is_some(val) { - pages += [option::unwrap(val)]; + pages += [option::unwrap(val)]/~; } else { break; } @@ -175,4 +175,4 @@ mod test { fn mk_doc(source: str) -> doc::doc { mk_doc_(config::doc_per_mod, source) } -} \ No newline at end of file +} diff --git a/src/rustdoc/par.rs b/src/rustdoc/par.rs index 38fb3ca51a7..c07c3d50c7c 100644 --- a/src/rustdoc/par.rs +++ b/src/rustdoc/par.rs @@ -1,14 +1,14 @@ export anymap, seqmap, parmap; -fn anymap(v: [T], f: fn~(T) -> U) -> [U] { +fn anymap(v: [T]/~, f: fn~(T) -> U) -> [U]/~ { parmap(v, f) } -fn seqmap(v: [T], f: fn(T) -> U) -> [U] { +fn seqmap(v: [T]/~, f: fn(T) -> U) -> [U]/~ { vec::map(v, f) } -fn parmap(v: [T], f: fn~(T) -> U) -> [U] unsafe { +fn parmap(v: [T]/~, f: fn~(T) -> U) -> [U]/~ unsafe { let futures = vec::map(v) {|elt| let po = comm::port(); let ch = comm::chan(po); @@ -25,7 +25,7 @@ fn parmap(v: [T], f: fn~(T) -> U) -> [U] unsafe { #[test] fn test_parallel_map() { - let i = [1, 2, 3, 4]; + let i = [1, 2, 3, 4]/~; let j = parmap(i) {|e| e + 1 }; - assert j == [2, 3, 4, 5]; + assert j == [2, 3, 4, 5]/~; } diff --git a/src/rustdoc/parse.rs b/src/rustdoc/parse.rs index 9b0291cd8a1..433cc825a31 100644 --- a/src/rustdoc/parse.rs +++ b/src/rustdoc/parse.rs @@ -12,12 +12,12 @@ export from_file, from_str, from_file_sess, from_str_sess; fn from_file(file: str) -> @ast::crate { parse::parse_crate_from_file( - file, [], parse::new_parse_sess(none)) + file, []/~, parse::new_parse_sess(none)) } fn from_str(source: str) -> @ast::crate { 
parse::parse_crate_from_source_str( - "-", @source, [], parse::new_parse_sess(none)) + "-", @source, []/~, parse::new_parse_sess(none)) } fn from_file_sess(sess: session::session, file: str) -> @ast::crate { diff --git a/src/rustdoc/path_pass.rs b/src/rustdoc/path_pass.rs index 41d97275c55..f0411d0b702 100644 --- a/src/rustdoc/path_pass.rs +++ b/src/rustdoc/path_pass.rs @@ -13,14 +13,14 @@ fn mk_pass() -> pass { type ctxt = { srv: astsrv::srv, - mut path: [str] + mut path: [str]/~ }; #[warn(no_non_implicitly_copyable_typarams)] fn run(srv: astsrv::srv, doc: doc::doc) -> doc::doc { let ctxt = { srv: srv, - mut path: [] + mut path: []/~ }; let fold = fold::fold({ fold_item: fold_item, @@ -70,9 +70,9 @@ fn should_record_mod_paths() { let doc = extract::from_srv(srv, ""); let doc = run(srv, doc); assert doc.cratemod().mods()[0].mods()[0].mods()[0].path() - == ["a", "b"]; + == ["a", "b"]/~; assert doc.cratemod().mods()[0].mods()[1].mods()[0].path() - == ["a", "d"]; + == ["a", "d"]/~; } } @@ -82,7 +82,7 @@ fn should_record_fn_paths() { astsrv::from_str(source) {|srv| let doc = extract::from_srv(srv, ""); let doc = run(srv, doc); - assert doc.cratemod().mods()[0].fns()[0].path() == ["a"]; + assert doc.cratemod().mods()[0].fns()[0].path() == ["a"]/~; } } @@ -92,7 +92,7 @@ fn should_record_native_mod_paths() { astsrv::from_str(source) {|srv| let doc = extract::from_srv(srv, ""); let doc = run(srv, doc); - assert doc.cratemod().mods()[0].nmods()[0].path() == ["a"]; + assert doc.cratemod().mods()[0].nmods()[0].path() == ["a"]/~; } } @@ -102,6 +102,6 @@ fn should_record_native_fn_paths() { astsrv::from_str(source) {|srv| let doc = extract::from_srv(srv, ""); let doc = run(srv, doc); - assert doc.cratemod().nmods()[0].fns[0].path() == ["a"]; + assert doc.cratemod().nmods()[0].fns[0].path() == ["a"]/~; } } diff --git a/src/rustdoc/prune_hidden_pass.rs b/src/rustdoc/prune_hidden_pass.rs index b7357644825..e2c09a33a7e 100644 --- a/src/rustdoc/prune_hidden_pass.rs +++ b/src/rustdoc/prune_hidden_pass.rs @@ -39,7 +39,7 @@ fn is_hidden(srv: astsrv::srv, doc: doc::itemdoc) -> bool { astsrv::exec(srv) {|ctxt| let attrs = alt ctxt.ast_map.get(id) { ast_map::node_item(item, _) { item.attrs } - _ { [] } + _ { []/~ } }; attr_parser::parse_hidden(attrs) } diff --git a/src/rustdoc/prune_unexported_pass.rs b/src/rustdoc/prune_unexported_pass.rs index f0eac40a297..b564af58794 100644 --- a/src/rustdoc/prune_unexported_pass.rs +++ b/src/rustdoc/prune_unexported_pass.rs @@ -30,7 +30,7 @@ fn fold_mod(fold: fold::fold, doc: doc::moddoc) -> doc::moddoc { } } -fn exported_items(srv: astsrv::srv, doc: doc::moddoc) -> [doc::itemtag] { +fn exported_items(srv: astsrv::srv, doc: doc::moddoc) -> [doc::itemtag]/~ { exported_things( srv, doc, exported_items_from_crate, @@ -41,9 +41,9 @@ fn exported_items(srv: astsrv::srv, doc: doc::moddoc) -> [doc::itemtag] { fn exported_things( srv: astsrv::srv, doc: doc::moddoc, - from_crate: fn(astsrv::srv, doc::moddoc) -> [T], - from_mod: fn(astsrv::srv, doc::moddoc) -> [T] -) -> [T] { + from_crate: fn(astsrv::srv, doc::moddoc) -> [T]/~, + from_mod: fn(astsrv::srv, doc::moddoc) -> [T]/~ +) -> [T]/~ { if doc.id() == ast::crate_node_id { from_crate(srv, doc) } else { @@ -54,14 +54,14 @@ fn exported_things( fn exported_items_from_crate( srv: astsrv::srv, doc: doc::moddoc -) -> [doc::itemtag] { +) -> [doc::itemtag]/~ { exported_items_from(srv, doc, is_exported_from_crate) } fn exported_items_from_mod( srv: astsrv::srv, doc: doc::moddoc -) -> [doc::itemtag] { +) -> [doc::itemtag]/~ { 
exported_items_from(srv, doc, {|a,b| is_exported_from_mod(a, doc.id(), b) }) @@ -71,7 +71,7 @@ fn exported_items_from( srv: astsrv::srv, doc: doc::moddoc, is_exported: fn(astsrv::srv, str) -> bool -) -> [doc::itemtag] { +) -> [doc::itemtag]/~ { vec::filter_map(doc.items) { |itemtag| let itemtag = alt itemtag { doc::enumtag(enumdoc) { @@ -96,7 +96,7 @@ fn exported_variants_from( srv: astsrv::srv, doc: doc::enumdoc, is_exported: fn(astsrv::srv, str) -> bool -) -> [doc::variantdoc] { +) -> [doc::variantdoc]/~ { vec::filter_map(doc.variants) { |doc| if is_exported(srv, doc.name) { some(doc) diff --git a/src/rustdoc/reexport_pass.rs b/src/rustdoc/reexport_pass.rs index 54beb013960..f9d1c502830 100644 --- a/src/rustdoc/reexport_pass.rs +++ b/src/rustdoc/reexport_pass.rs @@ -21,7 +21,7 @@ fn mk_pass() -> pass { type def_set = map::set; type def_map = map::hashmap; -type path_map = map::hashmap; +type path_map = map::hashmap; fn run(srv: astsrv::srv, doc: doc::doc) -> doc::doc { @@ -43,17 +43,17 @@ fn run(srv: astsrv::srv, doc: doc::doc) -> doc::doc { // to association lists. Yuck. fn to_assoc_list( map: map::hashmap -) -> [(K, V)] { +) -> [(K, V)]/~ { - let mut vec = []; + let mut vec = []/~; for map.each {|k, v| - vec += [(k, v)]; + vec += [(k, v)]/~; } ret vec; } fn from_assoc_list( - list: [(K, V)], + list: [(K, V)]/~, new_hash: fn() -> map::hashmap ) -> map::hashmap { @@ -66,13 +66,13 @@ fn from_assoc_list( } fn from_def_assoc_list( - list: [(ast::def_id, V)] + list: [(ast::def_id, V)]/~ ) -> map::hashmap { from_assoc_list(list, ast_util::new_def_hash) } fn from_str_assoc_list( - list: [(str, V)] + list: [(str, V)]/~ ) -> map::hashmap { from_assoc_list(list, map::str_hash) } @@ -96,10 +96,10 @@ fn build_reexport_def_set(srv: astsrv::srv) -> def_set { from_def_assoc_list(assoc_list) } -fn find_reexport_impls(ctxt: astsrv::ctxt) -> [ast::def_id] { - let defs = @mut []; +fn find_reexport_impls(ctxt: astsrv::ctxt) -> [ast::def_id]/~ { + let defs = @mut []/~; for_each_reexported_impl(ctxt) {|_mod_id, i| - *defs += [i.did] + *defs += [i.did]/~ } ret *defs; } @@ -169,7 +169,7 @@ fn build_reexport_path_map(srv: astsrv::srv, -def_map: def_map) -> path_map { let assoc_list = astsrv::exec(srv) {|ctxt| let def_map = from_def_assoc_list(def_assoc_list); - let path_map = map::str_hash::<[(str,doc::itemtag)]>(); + let path_map = map::str_hash::<[(str,doc::itemtag)]/~>(); for ctxt.exp_map.each {|exp_id, defs| let path = alt check ctxt.ast_map.get(exp_id) { @@ -182,12 +182,12 @@ fn build_reexport_path_map(srv: astsrv::srv, -def_map: def_map) -> path_map { }; let modpath = ast_map::path_to_str(vec::init(*path)); - let mut reexportdocs = []; + let mut reexportdocs = []/~; for defs.each {|def| if !def.reexp { cont; } alt def_map.find(def.id) { some(itemtag) { - reexportdocs += [(*name, itemtag)]; + reexportdocs += [(*name, itemtag)]/~; } _ {} } @@ -208,8 +208,8 @@ fn build_reexport_path_map(srv: astsrv::srv, -def_map: def_map) -> path_map { for find_reexport_impl_docs(ctxt, def_map).each {|elt| let (path, doc) = elt; let docs = alt path_map.find(path) { - some(docs) { docs + [(doc)] } - none { [doc] } + some(docs) { docs + [(doc)]/~ } + none { [doc]/~ } }; path_map.insert(path, docs); } @@ -223,8 +223,8 @@ fn build_reexport_path_map(srv: astsrv::srv, -def_map: def_map) -> path_map { fn find_reexport_impl_docs( ctxt: astsrv::ctxt, def_map: def_map -) -> [(str, (str, doc::itemtag))] { - let docs = @mut []; +) -> [(str, (str, doc::itemtag))]/~ { + let docs = @mut []/~; for_each_reexported_impl(ctxt) {|mod_id, i| 
let path = alt ctxt.ast_map.find(mod_id) { @@ -245,7 +245,7 @@ fn find_reexport_impl_docs( let doc = alt check def_map.find(i.did) { some(doc) { doc } }; - *docs += [(path, (ident, doc))]; + *docs += [(path, (ident, doc))]/~; } ret *docs; @@ -322,7 +322,7 @@ fn merge_reexports( let path = if is_topmod { doc.path() } else { - doc.path() + [doc.name()] + doc.path() + [doc.name()]/~ }; let new_items = get_new_items(path, fold.ctxt); @@ -334,16 +334,16 @@ fn merge_reexports( } } - fn get_new_items(path: [str], path_map: path_map) -> [doc::itemtag] { + fn get_new_items(path: [str]/~, path_map: path_map) -> [doc::itemtag]/~ { #debug("looking for reexports in path %?", path); alt path_map.find(str::connect(path, "::")) { some(name_docs) { - vec::foldl([], name_docs) {|v, name_doc| + vec::foldl([]/~, name_docs) {|v, name_doc| let (name, doc) = name_doc; - v + [reexport_doc(doc, name)] + v + [reexport_doc(doc, name)]/~ } } - none { [] } + none { []/~ } } } diff --git a/src/rustdoc/rustdoc.rs b/src/rustdoc/rustdoc.rs index 4427d1bfa49..07749fcb41c 100755 --- a/src/rustdoc/rustdoc.rs +++ b/src/rustdoc/rustdoc.rs @@ -11,7 +11,7 @@ type pass = { fn run_passes( srv: astsrv::srv, doc: doc::doc, - passes: [pass] + passes: [pass]/~ ) -> doc::doc { #[doc( @@ -55,11 +55,11 @@ fn test_run_passes() { name: doc.cratemod().name() + "two" with doc.cratemod().item }, - items: [], + items: []/~, index: none } }) - ] + ]/~ } } fn pass2( @@ -74,11 +74,11 @@ fn test_run_passes() { name: doc.cratemod().name() + "three" with doc.cratemod().item }, - items: [], + items: []/~, index: none } }) - ] + ]/~ } } let source = ""; @@ -92,14 +92,14 @@ fn test_run_passes() { name: "", f: pass2 } - ]; + ]/~; let doc = extract::from_srv(srv, "one"); let doc = run_passes(srv, doc, passes); assert doc.cratemod().name() == "onetwothree"; } } -fn main(args: [str]) { +fn main(args: [str]/~) { if vec::contains(args, "-h") { config::usage(); @@ -156,6 +156,6 @@ fn run(config: config::config) { markdown_pass::mk_pass( markdown_writer::make_writer_factory(config) ) - ]); + ]/~); } -} \ No newline at end of file +} diff --git a/src/rustdoc/sectionalize_pass.rs b/src/rustdoc/sectionalize_pass.rs index 143c0e1350a..9a92d527115 100644 --- a/src/rustdoc/sectionalize_pass.rs +++ b/src/rustdoc/sectionalize_pass.rs @@ -64,7 +64,7 @@ fn fold_impl(fold: fold::fold<()>, doc: doc::impldoc) -> doc::impldoc { } } -fn sectionalize(desc: option) -> (option, [doc::section]) { +fn sectionalize(desc: option) -> (option, [doc::section]/~) { #[doc = " @@ -85,20 +85,20 @@ fn sectionalize(desc: option) -> (option, [doc::section]) { "]; if option::is_none(desc) { - ret (none, []); + ret (none, []/~); } let lines = str::lines(option::get(desc)); let mut new_desc = none::; let mut current_section = none; - let mut sections = []; + let mut sections = []/~; for lines.each {|line| alt parse_header(line) { some(header) { if option::is_some(current_section) { - sections += [option::get(current_section)]; + sections += [option::get(current_section)]/~; } current_section = some({ header: header, @@ -129,7 +129,7 @@ fn sectionalize(desc: option) -> (option, [doc::section]) { } if option::is_some(current_section) { - sections += [option::get(current_section)]; + sections += [option::get(current_section)]/~; } (new_desc, sections) diff --git a/src/rustdoc/text_pass.rs b/src/rustdoc/text_pass.rs index bbeaf4b9d2f..fb34cb8ae84 100644 --- a/src/rustdoc/text_pass.rs +++ b/src/rustdoc/text_pass.rs @@ -44,7 +44,7 @@ fn fold_item(fold: fold::fold, doc: doc::itemdoc) -> doc::itemdoc { 
} } -fn apply_to_sections(op: op, sections: [doc::section]) -> [doc::section] { +fn apply_to_sections(op: op, sections: [doc::section]/~) -> [doc::section]/~ { par::anymap(sections) {|section, copy op| { header: op(section.header), @@ -76,7 +76,7 @@ fn fold_iface(fold: fold::fold, doc: doc::ifacedoc) -> doc::ifacedoc { } } -fn apply_to_methods(op: op, docs: [doc::methoddoc]) -> [doc::methoddoc] { +fn apply_to_methods(op: op, docs: [doc::methoddoc]/~) -> [doc::methoddoc]/~ { par::anymap(docs) {|doc, copy op| { brief: maybe_apply_op(op, doc.brief), diff --git a/src/rustdoc/trim_pass.rs b/src/rustdoc/trim_pass.rs index c0e9560ea02..e3c79de22bb 100644 --- a/src/rustdoc/trim_pass.rs +++ b/src/rustdoc/trim_pass.rs @@ -29,4 +29,4 @@ mod test { mk_pass().f(srv, doc) } } -} \ No newline at end of file +} diff --git a/src/rustdoc/tystr_pass.rs b/src/rustdoc/tystr_pass.rs index e18be9b4301..abb42578d29 100644 --- a/src/rustdoc/tystr_pass.rs +++ b/src/rustdoc/tystr_pass.rs @@ -151,8 +151,8 @@ fn fold_iface( fn merge_methods( srv: astsrv::srv, item_id: doc::ast_id, - docs: [doc::methoddoc] -) -> [doc::methoddoc] { + docs: [doc::methoddoc]/~ +) -> [doc::methoddoc]/~ { par::anymap(docs) {|doc| { sig: get_method_sig(srv, item_id, doc.name) diff --git a/src/rustdoc/unindent_pass.rs b/src/rustdoc/unindent_pass.rs index 342e0914647..c7063f4ca05 100644 --- a/src/rustdoc/unindent_pass.rs +++ b/src/rustdoc/unindent_pass.rs @@ -62,7 +62,7 @@ fn unindent(s: str) -> str { }; if check vec::is_not_empty(lines) { - let unindented = [str::trim(vec::head(lines))] + let unindented = [str::trim(vec::head(lines))]/~ + par::anymap(vec::tail(lines)) {|line| if str::is_whitespace(line) { line diff --git a/src/test/auxiliary/cci_class_6.rs b/src/test/auxiliary/cci_class_6.rs index 15fa3e3e785..8460b670753 100644 --- a/src/test/auxiliary/cci_class_6.rs +++ b/src/test/auxiliary/cci_class_6.rs @@ -2,17 +2,17 @@ mod kitties { class cat { priv { - let mut info : [U]; + let mut info : [U]/~; let mut meows : uint; } let how_hungry : int; - new(in_x : uint, in_y : int, -in_info: [U]) + new(in_x : uint, in_y : int, -in_info: [U]/~) { self.meows = in_x; self.how_hungry = in_y; self.info <- in_info; } - fn speak(stuff: [T]) { + fn speak(stuff: [T]/~) { self.meows += stuff.len(); } fn meow_count() -> uint { self.meows } diff --git a/src/test/auxiliary/cci_iter_lib.rs b/src/test/auxiliary/cci_iter_lib.rs index 50d9df243e7..af575dc00f6 100644 --- a/src/test/auxiliary/cci_iter_lib.rs +++ b/src/test/auxiliary/cci_iter_lib.rs @@ -1,7 +1,7 @@ #[link(name="cci_iter_lib", vers="0.0")]; #[inline] -fn iter(v: [T], f: fn(T)) { +fn iter(v: [T]/~, f: fn(T)) { let mut i = 0u; let n = vec::len(v); while i < n { diff --git a/src/test/auxiliary/cci_no_inline_lib.rs b/src/test/auxiliary/cci_no_inline_lib.rs index 42f959a4a7d..468ebeb198c 100644 --- a/src/test/auxiliary/cci_no_inline_lib.rs +++ b/src/test/auxiliary/cci_no_inline_lib.rs @@ -1,7 +1,7 @@ #[link(name="cci_no_inline_lib", vers="0.0")]; // same as cci_iter_lib, more-or-less, but not marked inline -fn iter(v: [uint], f: fn(uint)) { +fn iter(v: [uint]/~, f: fn(uint)) { let mut i = 0u; let n = vec::len(v); while i < n { diff --git a/src/test/auxiliary/issue2378a.rs b/src/test/auxiliary/issue2378a.rs index 35269a2d653..dfe0f582435 100644 --- a/src/test/auxiliary/issue2378a.rs +++ b/src/test/auxiliary/issue2378a.rs @@ -1,7 +1,7 @@ enum maybe { just(T), nothing } impl methods for maybe { - fn [](idx: uint) -> T { + fn []/~(idx: uint) -> T { alt self { just(t) { t } nothing { fail; } diff --git 
a/src/test/auxiliary/issue2378b.rs b/src/test/auxiliary/issue2378b.rs index 9cfe4d3a368..e26be8c6973 100644 --- a/src/test/auxiliary/issue2378b.rs +++ b/src/test/auxiliary/issue2378b.rs @@ -6,7 +6,7 @@ import issue2378a::methods; type two_maybes = {a: maybe, b: maybe}; impl methods for two_maybes { - fn [](idx: uint) -> (T, T) { + fn []/~(idx: uint) -> (T, T) { (self.a[idx], self.b[idx]) } } \ No newline at end of file diff --git a/src/test/bench/core-uint-to-str.rs b/src/test/bench/core-uint-to-str.rs index 026ce3fe061..a711ae5dd46 100644 --- a/src/test/bench/core-uint-to-str.rs +++ b/src/test/bench/core-uint-to-str.rs @@ -1,4 +1,4 @@ -fn main(args: [str]) { +fn main(args: [str]/~) { let args = if os::getenv("RUST_BENCH").is_some() { ["", "10000000"] } else if args.len() <= 1u { diff --git a/src/test/bench/core-vec-append.rs b/src/test/bench/core-vec-append.rs index cde1c76e344..e46cf25fade 100644 --- a/src/test/bench/core-vec-append.rs +++ b/src/test/bench/core-vec-append.rs @@ -4,15 +4,15 @@ use std; import dvec::{dvec, extensions}; import io::writer_util; -fn collect_raw(num: uint) -> [uint] { - let mut result = []; +fn collect_raw(num: uint) -> [uint]/~ { + let mut result = []/~; for uint::range(0u, num) { |i| vec::push(result, i); } ret result; } -fn collect_dvec(num: uint) -> [mut uint] { +fn collect_dvec(num: uint) -> [mut uint]/~ { let result = dvec(); for uint::range(0u, num) { |i| result.push(i); @@ -20,7 +20,7 @@ fn collect_dvec(num: uint) -> [mut uint] { ret dvec::unwrap(result); } -fn main(args: [str]) { +fn main(args: [str]/~) { let args = if os::getenv("RUST_BENCH").is_some() { ["", "50000000"] } else if args.len() <= 1u { diff --git a/src/test/bench/graph500-bfs.rs b/src/test/bench/graph500-bfs.rs index 56733b29178..edad5e0dbba 100644 --- a/src/test/bench/graph500-bfs.rs +++ b/src/test/bench/graph500-bfs.rs @@ -16,10 +16,10 @@ import comm::*; import int::abs; type node_id = i64; -type graph = [[node_id]]; -type bfs_result = [node_id]; +type graph = [[node_id]/~]/~; +type bfs_result = [node_id]/~; -fn make_edges(scale: uint, edgefactor: uint) -> [(node_id, node_id)] { +fn make_edges(scale: uint, edgefactor: uint) -> [(node_id, node_id)]/~ { let r = rand::xorshift(); fn choose_edge(i: node_id, j: node_id, scale: uint, r: rand::rng) @@ -65,7 +65,7 @@ fn make_edges(scale: uint, edgefactor: uint) -> [(node_id, node_id)] { } } -fn make_graph(N: uint, edges: [(node_id, node_id)]) -> graph { +fn make_graph(N: uint, edges: [(node_id, node_id)]/~) -> graph { let graph = vec::from_fn(N) {|_i| map::hashmap::({|x| x as uint }, {|x, y| x == y }) }; @@ -82,7 +82,7 @@ fn make_graph(N: uint, edges: [(node_id, node_id)]) -> graph { } } -fn gen_search_keys(graph: graph, n: uint) -> [node_id] { +fn gen_search_keys(graph: graph, n: uint) -> [node_id]/~ { let keys = map::hashmap::({|x| x as uint }, {|x, y| x == y }); let r = rand::rng(); @@ -102,7 +102,7 @@ fn gen_search_keys(graph: graph, n: uint) -> [node_id] { Nodes that are unreachable have a parent of -1."] fn bfs(graph: graph, key: node_id) -> bfs_result { - let marks : [mut node_id] + let marks : [mut node_id]/~ = vec::to_mut(vec::from_elem(vec::len(graph), -1i64)); let Q = deque::create(); @@ -278,7 +278,7 @@ fn pbfs(&&graph: arc::arc, key: node_id) -> bfs_result { } #[doc="Performs at least some of the validation in the Graph500 spec."] -fn validate(edges: [(node_id, node_id)], +fn validate(edges: [(node_id, node_id)]/~, root: node_id, tree: bfs_result) -> bool { // There are 5 things to test. Below is code for each of them. 
@@ -293,7 +293,7 @@ fn validate(edges: [(node_id, node_id)], let mut status = true; let level = tree.map() {|parent| let mut parent = parent; - let mut path = []; + let mut path = []/~; if parent == -1i64 { // This node was not in the tree. @@ -371,7 +371,7 @@ fn validate(edges: [(node_id, node_id)], true } -fn main(args: [str]) { +fn main(args: [str]/~) { let args = if os::getenv("RUST_BENCH").is_some() { ["", "15", "48"] } else if args.len() <= 1u { diff --git a/src/test/bench/msgsend-ring-new.rs b/src/test/bench/msgsend-ring-new.rs index cba62f2ada1..b8e74520b8f 100644 --- a/src/test/bench/msgsend-ring-new.rs +++ b/src/test/bench/msgsend-ring-new.rs @@ -21,7 +21,7 @@ fn thread_ring(i: uint, }; } -fn main(args: [str]) { +fn main(args: [str]/~) { let args = if os::getenv("RUST_BENCH").is_some() { ["", "100", "10000"] } else if args.len() <= 1u { @@ -39,7 +39,7 @@ fn main(args: [str]) { let start = time::precise_time_s(); // create the ring - let mut futures = []; + let mut futures = []/~; for uint::range(1u, num_tasks) {|i| let get_chan = port(); @@ -50,7 +50,7 @@ fn main(args: [str]) { let p = port(); get_chan_chan.send(chan(p)); thread_ring(i, msg_per_task, num_chan, p) - }]; + }]/~; } num_chan = get_chan.recv(); diff --git a/src/test/bench/msgsend-ring.rs b/src/test/bench/msgsend-ring.rs index 442b6e62dd1..5b59005e572 100644 --- a/src/test/bench/msgsend-ring.rs +++ b/src/test/bench/msgsend-ring.rs @@ -21,7 +21,7 @@ fn thread_ring(i: uint, }; } -fn main(args: [str]) { +fn main(args: [str]/~) { let args = if os::getenv("RUST_BENCH").is_some() { ["", "100", "10000"] } else if args.len() <= 1u { @@ -39,7 +39,7 @@ fn main(args: [str]) { let start = time::precise_time_s(); // create the ring - let mut futures = []; + let mut futures = []/~; for uint::range(1u, num_tasks) {|i| let get_chan = port(); @@ -49,7 +49,7 @@ fn main(args: [str]) { let p = port(); get_chan_chan.send(chan(p)); thread_ring(i, msg_per_task, num_chan, p) - }]; + }]/~; num_chan = get_chan.recv(); }; diff --git a/src/test/bench/msgsend.rs b/src/test/bench/msgsend.rs index b6a6ba60a9f..cea712635a0 100644 --- a/src/test/bench/msgsend.rs +++ b/src/test/bench/msgsend.rs @@ -27,7 +27,7 @@ fn server(requests: comm::port, responses: comm::chan) { comm::send(responses, count); } -fn run(args: [str]) { +fn run(args: [str]/~) { let from_child = comm::port(); let to_parent = comm::chan(from_child); let to_child = task::spawn_listener {|po| @@ -37,7 +37,7 @@ fn run(args: [str]) { let workers = option::get(uint::from_str(args[2])); let start = std::time::precise_time_s(); let to_child = to_child; - let mut worker_results = []; + let mut worker_results = []/~; for uint::range(0u, workers) {|_i| let builder = task::builder(); vec::push(worker_results, task::future_result(builder)); @@ -58,7 +58,7 @@ fn run(args: [str]) { io::stdout().write_str(#fmt("Throughput=%f per sec\n", thruput)); } -fn main(args: [str]) { +fn main(args: [str]/~) { let args = if os::getenv("RUST_BENCH").is_some() { ["", "1000000", "10000"] } else if args.len() <= 1u { diff --git a/src/test/bench/shootout-ackermann.rs b/src/test/bench/shootout-ackermann.rs index 8969bdfc0b7..8f1f95b2581 100644 --- a/src/test/bench/shootout-ackermann.rs +++ b/src/test/bench/shootout-ackermann.rs @@ -12,7 +12,7 @@ fn ack(m: int, n: int) -> int { } } -fn main(args: [str]) { +fn main(args: [str]/~) { let args = if os::getenv("RUST_BENCH").is_some() { ["", "12"] } else if args.len() <= 1u { diff --git a/src/test/bench/shootout-binarytrees.rs b/src/test/bench/shootout-binarytrees.rs index 
6486602cc6e..b2769a36e89 100644 --- a/src/test/bench/shootout-binarytrees.rs +++ b/src/test/bench/shootout-binarytrees.rs @@ -22,7 +22,7 @@ fn bottom_up_tree(arena: &a.arena::arena, item: int, depth: int) -> &a.tree { ret new(*arena) nil; } -fn main(args: [str]) { +fn main(args: [str]/~) { let args = if os::getenv("RUST_BENCH").is_some() { ["", "17"] } else if args.len() <= 1u { diff --git a/src/test/bench/shootout-fannkuchredux.rs b/src/test/bench/shootout-fannkuchredux.rs index 654f75d8260..7a518e8c394 100644 --- a/src/test/bench/shootout-fannkuchredux.rs +++ b/src/test/bench/shootout-fannkuchredux.rs @@ -58,7 +58,7 @@ fn fannkuch(n: int) -> int { ret flips; } -fn main(args: [str]) { +fn main(args: [str]/~) { let args = if os::getenv("RUST_BENCH").is_some() { ["", "10"] } else if args.len() <= 1u { diff --git a/src/test/bench/shootout-fasta.rs b/src/test/bench/shootout-fasta.rs index 4910673cb29..0670f928adb 100644 --- a/src/test/bench/shootout-fasta.rs +++ b/src/test/bench/shootout-fasta.rs @@ -23,16 +23,16 @@ fn myrandom_next(r: myrandom, mx: u32) -> u32 { type aminoacids = {ch: char, prob: u32}; -fn make_cumulative(aa: [aminoacids]) -> [aminoacids] { +fn make_cumulative(aa: [aminoacids]/~) -> [aminoacids]/~ { let mut cp: u32 = 0u32; - let mut ans: [aminoacids] = []; - for aa.each {|a| cp += a.prob; ans += [{ch: a.ch, prob: cp}]; } + let mut ans: [aminoacids]/~ = []/~; + for aa.each {|a| cp += a.prob; ans += [{ch: a.ch, prob: cp}]/~; } ret ans; } -fn select_random(r: u32, genelist: [aminoacids]) -> char { +fn select_random(r: u32, genelist: [aminoacids]/~) -> char { if r < genelist[0].prob { ret genelist[0].ch; } - fn bisect(v: [aminoacids], lo: uint, hi: uint, target: u32) -> char { + fn bisect(v: [aminoacids]/~, lo: uint, hi: uint, target: u32) -> char { if hi > lo + 1u { let mid: uint = lo + (hi - lo) / 2u; if target < v[mid].prob { @@ -43,7 +43,7 @@ fn select_random(r: u32, genelist: [aminoacids]) -> char { ret bisect(genelist, 0u, vec::len::(genelist) - 1u, r); } -fn make_random_fasta(wr: io::writer, id: str, desc: str, genelist: [aminoacids], n: int) { +fn make_random_fasta(wr: io::writer, id: str, desc: str, genelist: [aminoacids]/~, n: int) { wr.write_line(">" + id + " " + desc); let rng = @{mut last: std::rand::rng().next()}; let mut op: str = ""; @@ -74,7 +74,7 @@ fn make_repeat_fasta(wr: io::writer, id: str, desc: str, s: str, n: int) unsafe fn acid(ch: char, prob: u32) -> aminoacids { ret {ch: ch, prob: prob}; } -fn main(args: [str]) { +fn main(args: [str]/~) { let args = if os::getenv("RUST_BENCH").is_some() { // alioth tests k-nucleotide with this data at 25,000,000 ["", "5000000"] @@ -85,22 +85,22 @@ fn main(args: [str]) { }; let writer = if os::getenv("RUST_BENCH").is_some() { - result::get(io::file_writer("./shootout-fasta.data", [io::truncate, io::create])) + result::get(io::file_writer("./shootout-fasta.data", [io::truncate, io::create]/~)) } else { io::stdout() }; let n = int::from_str(args[1]).get(); - let iub: [aminoacids] = + let iub: [aminoacids]/~ = make_cumulative([acid('a', 27u32), acid('c', 12u32), acid('g', 12u32), acid('t', 27u32), acid('B', 2u32), acid('D', 2u32), acid('H', 2u32), acid('K', 2u32), acid('M', 2u32), acid('N', 2u32), acid('R', 2u32), acid('S', 2u32), - acid('V', 2u32), acid('W', 2u32), acid('Y', 2u32)]); - let homosapiens: [aminoacids] = + acid('V', 2u32), acid('W', 2u32), acid('Y', 2u32)]/~); + let homosapiens: [aminoacids]/~ = make_cumulative([acid('a', 30u32), acid('c', 20u32), acid('g', 20u32), - acid('t', 30u32)]); + acid('t', 
30u32)]/~); let alu: str = "GGCCGGGCGCGGTGGCTCACGCCTGTAATCCCAGCACTTTGG" + "GAGGCCGAGGCGGGCGGATCACCTGAGGTCAGGAGTTCGAGA" + diff --git a/src/test/bench/shootout-fibo.rs b/src/test/bench/shootout-fibo.rs index df621254284..2f076343cfd 100644 --- a/src/test/bench/shootout-fibo.rs +++ b/src/test/bench/shootout-fibo.rs @@ -8,7 +8,7 @@ fn fib(n: int) -> int { } } -fn main(args: [str]) { +fn main(args: [str]/~) { let args = if os::getenv("RUST_BENCH").is_some() { ["", "40"] } else if args.len() <= 1u { diff --git a/src/test/bench/shootout-k-nucleotide.rs b/src/test/bench/shootout-k-nucleotide.rs index 26e8b938468..b6811db4b62 100644 --- a/src/test/bench/shootout-k-nucleotide.rs +++ b/src/test/bench/shootout-k-nucleotide.rs @@ -10,7 +10,7 @@ import std::map::hashmap; import std::sort; // given a map, print a sorted version of it -fn sort_and_fmt(mm: hashmap<[u8], uint>, total: uint) -> str { +fn sort_and_fmt(mm: hashmap<[u8]/~, uint>, total: uint) -> str { fn pct(xx: uint, yy: uint) -> float { ret (xx as float) * 100f / (yy as float); } @@ -28,14 +28,14 @@ fn sort_and_fmt(mm: hashmap<[u8], uint>, total: uint) -> str { } // sort by key, then by value - fn sortKV(orig: [(TT,UU)]) -> [(TT,UU)] { + fn sortKV(orig: [(TT,UU)]/~) -> [(TT,UU)]/~ { ret sort::merge_sort(le_by_val, sort::merge_sort(le_by_key, orig)); } - let mut pairs = []; + let mut pairs = []/~; // map -> [(k,%)] - mm.each(fn&(key: [u8], val: uint) -> bool { + mm.each(fn&(key: [u8]/~, val: uint) -> bool { vec::push(pairs, (key, pct(val, total))); ret true; }); @@ -44,7 +44,7 @@ fn sort_and_fmt(mm: hashmap<[u8], uint>, total: uint) -> str { let mut buffer = ""; - pairs_sorted.each(fn&(kv: ([u8], float)) -> bool unsafe { + pairs_sorted.each(fn&(kv: ([u8]/~, float)) -> bool unsafe { let (k,v) = kv; buffer += (#fmt["%s %0.3f\n", str::to_upper(str::unsafe::from_bytes(k)), v]); ret true; @@ -54,7 +54,7 @@ fn sort_and_fmt(mm: hashmap<[u8], uint>, total: uint) -> str { } // given a map, search for the frequency of a pattern -fn find(mm: hashmap<[u8], uint>, key: str) -> uint { +fn find(mm: hashmap<[u8]/~, uint>, key: str) -> uint { alt mm.find(str::bytes(str::to_lower(key))) { option::none { ret 0u; } option::some(num) { ret num; } @@ -62,17 +62,17 @@ fn find(mm: hashmap<[u8], uint>, key: str) -> uint { } // given a map, increment the counter for a key -fn update_freq(mm: hashmap<[u8], uint>, key: [u8]) { +fn update_freq(mm: hashmap<[u8]/~, uint>, key: [u8]/~) { alt mm.find(key) { option::none { mm.insert(key, 1u ); } option::some(val) { mm.insert(key, 1u + val); } } } -// given a [u8], for each window call a function +// given a [u8]/~, for each window call a function // i.e., for "hello" and windows of size four, // run it("hell") and it("ello"), then return "llo" -fn windows_with_carry(bb: [const u8], nn: uint, it: fn(window: [u8])) -> [u8] { +fn windows_with_carry(bb: [const u8]/~, nn: uint, it: fn(window: [u8]/~)) -> [u8]/~ { let mut ii = 0u; let len = vec::len(bb); @@ -84,18 +84,18 @@ fn windows_with_carry(bb: [const u8], nn: uint, it: fn(window: [u8])) -> [u8] { ret vec::slice(bb, len - (nn - 1u), len); } -fn make_sequence_processor(sz: uint, from_parent: comm::port<[u8]>, to_parent: comm::chan) { +fn make_sequence_processor(sz: uint, from_parent: comm::port<[u8]/~>, to_parent: comm::chan) { - let freqs: hashmap<[u8], uint> = map::bytes_hash(); - let mut carry: [u8] = []; + let freqs: hashmap<[u8]/~, uint> = map::bytes_hash(); + let mut carry: [u8]/~ = []/~; let mut total: uint = 0u; - let mut line: [u8]; + let mut line: [u8]/~; loop { line = 
comm::recv(from_parent); - if line == [] { break; } + if line == []/~ { break; } carry = windows_with_carry(carry + line, sz, { |window| update_freq(freqs, window); @@ -119,7 +119,7 @@ fn make_sequence_processor(sz: uint, from_parent: comm::port<[u8]>, to_parent: c } // given a FASTA file on stdin, process sequence THREE -fn main(args: [str]) { +fn main(args: [str]/~) { let rdr = if os::getenv("RUST_BENCH").is_some() { // FIXME: Using this compile-time env variable is a crummy way to // get to this massive data set, but #include_bin chokes on it (#2598) @@ -135,10 +135,10 @@ fn main(args: [str]) { // initialize each sequence sorter - let sizes = [1u,2u,3u,4u,6u,12u,18u]; + let sizes = [1u,2u,3u,4u,6u,12u,18u]/~; let from_child = vec::map (sizes, { |_sz| comm::port() }); let to_parent = vec::mapi(sizes, { |ii, _sz| comm::chan(from_child[ii]) }); - let to_child = vec::mapi(sizes, fn@(ii: uint, sz: uint) -> comm::chan<[u8]> { + let to_child = vec::mapi(sizes, fn@(ii: uint, sz: uint) -> comm::chan<[u8]/~> { ret task::spawn_listener { |from_parent| make_sequence_processor(sz, from_parent, to_parent[ii]); }; diff --git a/src/test/bench/shootout-mandelbrot.rs b/src/test/bench/shootout-mandelbrot.rs index 7254b5bbb9e..8b91f8310e7 100644 --- a/src/test/bench/shootout-mandelbrot.rs +++ b/src/test/bench/shootout-mandelbrot.rs @@ -17,7 +17,7 @@ import io::writer_util; import std::map::hashmap; type cmplx = {re: f64, im: f64}; -type line = {i: uint, b: [u8]}; +type line = {i: uint, b: [u8]/~}; impl arith for cmplx { fn *(x: cmplx) -> cmplx { @@ -65,7 +65,7 @@ fn fillbyte(x: cmplx, incr: f64) -> u8 { fn chanmb(i: uint, size: uint, ch: comm::chan) -> () { - let mut crv = []; + let mut crv = []/~; let incr = 2f64/(size as f64); let y = incr*(i as f64) - 1f64; let xincr = 8f64*incr; @@ -101,7 +101,7 @@ fn writer(path: str, writech: comm::chan>, size: uint) _ { result::get( io::file_writer(path, - [io::create, io::truncate])) + [io::create, io::truncate]/~)) } }; cout.write_line("P4"); @@ -121,7 +121,7 @@ fn writer(path: str, writech: comm::chan>, size: uint) #debug("WS %u", prev); // FIXME (#2280): this temporary shouldn't be // necessary, but seems to be, for borrowing. 
- let v : [u8] = lines.get(prev); + let v : [u8]/~ = lines.get(prev); cout.write(v); done += 1_u; lines.remove(prev); @@ -140,7 +140,7 @@ fn writer(path: str, writech: comm::chan>, size: uint) } } -fn main(args: [str]) { +fn main(args: [str]/~) { let args = if os::getenv("RUST_BENCH").is_some() { ["", "4000", "10"] } else { diff --git a/src/test/bench/shootout-nbody.rs b/src/test/bench/shootout-nbody.rs index 9cc506887e4..518d1c39d85 100644 --- a/src/test/bench/shootout-nbody.rs +++ b/src/test/bench/shootout-nbody.rs @@ -13,7 +13,7 @@ native mod libc { fn sqrt(n: float) -> float; } -fn main(args: [str]) { +fn main(args: [str]/~) { let args = if os::getenv("RUST_BENCH").is_some() { ["", "4000000"] } else if args.len() <= 1u { @@ -22,7 +22,7 @@ fn main(args: [str]) { args }; let n = int::from_str(args[1]).get(); - let bodies: [Body::props] = NBodySystem::MakeNBodySystem(); + let bodies: [Body::props]/~ = NBodySystem::MakeNBodySystem(); io::println(#fmt("%f", NBodySystem::energy(bodies))); let mut i: int = 0; while i < n { NBodySystem::advance(bodies, 0.01); i += 1; } @@ -34,11 +34,11 @@ fn main(args: [str]) { mod NBodySystem { - fn MakeNBodySystem() -> [Body::props] { + fn MakeNBodySystem() -> [Body::props]/~ { // these each return a Body::props - let bodies: [Body::props] = + let bodies: [Body::props]/~ = [Body::sun(), Body::jupiter(), Body::saturn(), Body::uranus(), - Body::neptune()]; + Body::neptune()]/~; let mut px: float = 0.0; let mut py: float = 0.0; @@ -59,7 +59,7 @@ mod NBodySystem { ret bodies; } - fn advance(bodies: [Body::props], dt: float) { + fn advance(bodies: [Body::props]/~, dt: float) { let mut i: int = 0; while i < 5 { @@ -98,7 +98,7 @@ mod NBodySystem { b.z += dt * b.vz; } - fn energy(bodies: [Body::props]) -> float unsafe { + fn energy(bodies: [Body::props]/~) -> float unsafe { let mut dx: float; let mut dy: float; let mut dz: float; diff --git a/src/test/bench/shootout-pfib.rs b/src/test/bench/shootout-pfib.rs index bb922c93e61..b81d27df5b5 100644 --- a/src/test/bench/shootout-pfib.rs +++ b/src/test/bench/shootout-pfib.rs @@ -46,8 +46,8 @@ fn fib(n: int) -> int { type config = {stress: bool}; -fn parse_opts(argv: [str]) -> config { - let opts = [getopts::optflag("stress")]; +fn parse_opts(argv: [str]/~) -> config { + let opts = [getopts::optflag("stress")]/~; let opt_args = vec::slice(argv, 1u, vec::len(argv)); @@ -69,16 +69,16 @@ fn stress_task(&&id: int) { } fn stress(num_tasks: int) { - let mut results = []; + let mut results = []/~; for range(0, num_tasks) {|i| let builder = task::builder(); - results += [task::future_result(builder)]; + results += [task::future_result(builder)]/~; task::run(builder) {|| stress_task(i); } } for results.each {|r| future::get(r); } } -fn main(args: [str]) { +fn main(args: [str]/~) { let args = if os::getenv("RUST_BENCH").is_some() { ["", "20"] } else if args.len() <= 1u { diff --git a/src/test/bench/shootout-spectralnorm.rs b/src/test/bench/shootout-spectralnorm.rs index 22b2b332586..37e3380554b 100644 --- a/src/test/bench/shootout-spectralnorm.rs +++ b/src/test/bench/shootout-spectralnorm.rs @@ -6,7 +6,7 @@ fn eval_A(i: uint, j: uint) -> float { 1.0/(((i+j)*(i+j+1u)/2u+i+1u) as float) } -fn eval_A_times_u(u: [const float], Au: [mut float]) { +fn eval_A_times_u(u: [const float]/~, Au: [mut float]/~) { let N = vec::len(u); let mut i = 0u; while i < N { @@ -20,7 +20,7 @@ fn eval_A_times_u(u: [const float], Au: [mut float]) { } } -fn eval_At_times_u(u: [const float], Au: [mut float]) { +fn eval_At_times_u(u: [const float]/~, Au: [mut 
float]/~) { let N = vec::len(u); let mut i = 0u; while i < N { @@ -34,13 +34,13 @@ fn eval_At_times_u(u: [const float], Au: [mut float]) { } } -fn eval_AtA_times_u(u: [const float], AtAu: [mut float]) { +fn eval_AtA_times_u(u: [const float]/~, AtAu: [mut float]/~) { let v = vec::to_mut(vec::from_elem(vec::len(u), 0.0)); eval_A_times_u(u, v); eval_At_times_u(v, AtAu); } -fn main(args: [str]) { +fn main(args: [str]/~) { let args = if os::getenv("RUST_BENCH").is_some() { ["", "2000"] } else if args.len() <= 1u { diff --git a/src/test/bench/shootout-threadring.rs b/src/test/bench/shootout-threadring.rs index 5f9219d0804..9d8c5cc165b 100644 --- a/src/test/bench/shootout-threadring.rs +++ b/src/test/bench/shootout-threadring.rs @@ -37,7 +37,7 @@ fn roundtrip(id: int, p: comm::port, ch: comm::chan) { } } -fn main(args: [str]) { +fn main(args: [str]/~) { let args = if os::getenv("RUST_BENCH").is_some() { ["", "2000000"] } else if args.len() <= 1u { diff --git a/src/test/bench/std-smallintmap.rs b/src/test/bench/std-smallintmap.rs index 86b13dfc171..b043264ff93 100644 --- a/src/test/bench/std-smallintmap.rs +++ b/src/test/bench/std-smallintmap.rs @@ -17,7 +17,7 @@ fn check_sequential(min: uint, max: uint, map: smallintmap) { } } -fn main(args: [str]) { +fn main(args: [str]/~) { let args = if os::getenv("RUST_BENCH").is_some() { ["", "100000", "100"] } else if args.len() <= 1u { diff --git a/src/test/bench/sudoku.rs b/src/test/bench/sudoku.rs index 3973905c807..5b3b104a342 100644 --- a/src/test/bench/sudoku.rs +++ b/src/test/bench/sudoku.rs @@ -22,7 +22,7 @@ import io::{writer_util, reader_util}; export grid_t, read_grid, solve_grid, write_grid; // internal type of sudoku grids -type grid = [[mut u8]]; +type grid = [[mut u8]/~]/~; // exported type of sudoku grids enum grid_t { grid_ctor(grid), } @@ -92,11 +92,11 @@ fn solve_grid(g: grid_t) { } } - let mut work: [(u8, u8)] = []; /* queue of uncolored fields */ + let mut work: [(u8, u8)]/~ = []/~; /* queue of uncolored fields */ for u8::range(0u8, 9u8) { |row| for u8::range(0u8, 9u8) { |col| let color = (*g)[row][col]; - if color == 0u8 { work += [(row, col)]; } + if color == 0u8 { work += [(row, col)]/~; } } } @@ -126,7 +126,7 @@ fn write_grid(f: io::writer, g: grid_t) { } } -fn main(args: [str]) { +fn main(args: [str]/~) { let grid = if vec::len(args) == 1u { // FIXME create sudoku inline since nested vec consts dont work yet // (#571) diff --git a/src/test/bench/task-perf-alloc-unwind.rs b/src/test/bench/task-perf-alloc-unwind.rs index aee9b148cca..f10e198e29f 100644 --- a/src/test/bench/task-perf-alloc-unwind.rs +++ b/src/test/bench/task-perf-alloc-unwind.rs @@ -36,7 +36,7 @@ enum st { fn_box: fn@() -> @nillist, fn_unique: fn~() -> ~nillist, tuple: (@nillist, ~nillist), - vec: [@nillist], + vec: [@nillist]/~, res: r }) } @@ -62,7 +62,7 @@ fn recurse_or_fail(depth: int, st: option) { fn_box: fn@() -> @nillist { @nil::<()> }, fn_unique: fn~() -> ~nillist { ~nil::<()> }, tuple: (@nil, ~nil), - vec: [@nil], + vec: [@nil]/~, res: r(@nil) }) } @@ -77,7 +77,7 @@ fn recurse_or_fail(depth: int, st: option) { fn_unique: fn~() -> ~nillist { ~cons((), @*fn_unique()) }, tuple: (@cons((), first(st.tuple)), ~cons((), @*second(st.tuple))), - vec: st.vec + [@cons((), st.vec.last())], + vec: st.vec + [@cons((), st.vec.last())]/~, res: r(@cons((), st.res._l)) }) } diff --git a/src/test/bench/task-perf-one-million.rs b/src/test/bench/task-perf-one-million.rs index 5faf7f85857..14896a3e73e 100644 --- a/src/test/bench/task-perf-one-million.rs +++ 
b/src/test/bench/task-perf-one-million.rs @@ -9,7 +9,7 @@ enum msg { fn calc(children: uint, parent_ch: comm::chan) { let port = comm::port(); let chan = comm::chan(port); - let mut child_chs = []; + let mut child_chs = []/~; let mut sum = 0; iter::repeat (children) {|| @@ -45,7 +45,7 @@ fn calc(children: uint, parent_ch: comm::chan) { comm::send(parent_ch, done(sum + 1)); } -fn main(args: [str]) { +fn main(args: [str]/~) { let args = if os::getenv("RUST_BENCH").is_some() { ["", "100000"] } else if args.len() <= 1u { @@ -69,4 +69,4 @@ fn main(args: [str]) { done(sum) { sum } }; #error("How many tasks? %d tasks.", sum); -} \ No newline at end of file +} diff --git a/src/test/bench/task-perf-spawnalot.rs b/src/test/bench/task-perf-spawnalot.rs index c3dd93ce117..686a2175061 100644 --- a/src/test/bench/task-perf-spawnalot.rs +++ b/src/test/bench/task-perf-spawnalot.rs @@ -8,7 +8,7 @@ fn f(&&n: uint) { fn g() { } -fn main(args: [str]) { +fn main(args: [str]/~) { let args = if os::getenv("RUST_BENCH").is_some() { ["", "400"] } else if args.len() <= 1u { diff --git a/src/test/bench/task-perf-word-count-generic.rs b/src/test/bench/task-perf-word-count-generic.rs index c4e51902628..e4528333373 100644 --- a/src/test/bench/task-perf-word-count-generic.rs +++ b/src/test/bench/task-perf-word-count-generic.rs @@ -92,12 +92,12 @@ mod map_reduce { fn start_mappers( map: mapper, - ctrl: chan>, inputs: [K1]) - -> [joinable_task] + ctrl: chan>, inputs: [K1]/~) + -> [joinable_task]/~ { - let mut tasks = []; + let mut tasks = []/~; for inputs.each {|i| - tasks += [spawn_joinable {|| map_task(map, ctrl, i)}]; + tasks += [spawn_joinable {|| map_task(map, ctrl, i)}]/~; } ret tasks; } @@ -176,7 +176,7 @@ mod map_reduce { fn map_reduce( map: mapper, reduce: reducer, - inputs: [K1]) + inputs: [K1]/~) { let ctrl = port(); @@ -209,7 +209,7 @@ mod map_reduce { let r = reduce, kk = k; tasks += [ spawn_joinable {|| reduce_task(r, kk, ch) } - ]; + ]/~; c = recv(p); treemap::insert(reducers, k, c); } @@ -229,7 +229,7 @@ mod map_reduce { } } -fn main(argv: [str]) { +fn main(argv: [str]/~) { if vec::len(argv) < 2u { let out = io::stdout(); diff --git a/src/test/bench/task-perf-word-count.rs b/src/test/bench/task-perf-word-count.rs index 12fd0943b4c..a1493badbf3 100644 --- a/src/test/bench/task-perf-word-count.rs +++ b/src/test/bench/task-perf-word-count.rs @@ -58,12 +58,12 @@ mod map_reduce { enum reduce_proto { emit_val(int), done, ref, release, } - fn start_mappers(ctrl: chan, -inputs: [str]) -> - [future::future] { - let mut results = []; + fn start_mappers(ctrl: chan, -inputs: [str]/~) -> + [future::future]/~ { + let mut results = []/~; for inputs.each {|i| let builder = task::builder(); - results += [task::future_result(builder)]; + results += [task::future_result(builder)]/~; task::run(builder) {|| map_task(ctrl, i)} } ret results; @@ -128,7 +128,7 @@ mod map_reduce { reduce(key, {||get(p, state)}); } - fn map_reduce(-inputs: [str]) { + fn map_reduce(-inputs: [str]/~) { let ctrl = port::(); // This task becomes the master control task. 
It task::_spawns @@ -161,7 +161,7 @@ mod map_reduce { let p = port(); let ch = chan(p); let builder = task::builder(); - results += [task::future_result(builder)]; + results += [task::future_result(builder)]/~; task::run(builder) {||reduce_task(k, ch)} c = recv(p); reducers.insert(k, c); @@ -178,9 +178,9 @@ mod map_reduce { } } -fn main(argv: [str]) { +fn main(argv: [str]/~) { let inputs = if vec::len(argv) < 2u { - [input1(), input2(), input3()] + [input1(), input2(), input3()]/~ } else { vec::map(vec::slice(argv, 1u, vec::len(argv)), {|f| result::get(io::read_whole_file_str(f)) }) diff --git a/src/test/compile-fail/ambig_impl_unify.rs b/src/test/compile-fail/ambig_impl_unify.rs index 2f020742d54..038ade94bd4 100644 --- a/src/test/compile-fail/ambig_impl_unify.rs +++ b/src/test/compile-fail/ambig_impl_unify.rs @@ -1,12 +1,12 @@ -impl methods for [uint] { +impl methods for [uint]/~ { fn foo() -> int {1} //! NOTE candidate #1 is `methods::foo` } -impl methods for [int] { +impl methods for [int]/~ { fn foo() -> int {2} //! NOTE candidate #2 is `methods::foo` } fn main() { - let x = []; + let x = []/~; x.foo(); //! ERROR multiple applicable methods in scope } \ No newline at end of file diff --git a/src/test/compile-fail/assign-super.rs b/src/test/compile-fail/assign-super.rs index b50f2d95f56..70532cef3ae 100644 --- a/src/test/compile-fail/assign-super.rs +++ b/src/test/compile-fail/assign-super.rs @@ -1,5 +1,5 @@ fn main() { - let mut x: [mut int] = [mut 3]; - let y: [int] = [3]; + let mut x: [mut int]/~ = [mut 3]/~; + let y: [int]/~ = [3]/~; x = y; //! ERROR values differ in mutability } \ No newline at end of file diff --git a/src/test/compile-fail/attr-bad-meta.rs b/src/test/compile-fail/attr-bad-meta.rs index ec883d5ba20..f1f04c8ae98 100644 --- a/src/test/compile-fail/attr-bad-meta.rs +++ b/src/test/compile-fail/attr-bad-meta.rs @@ -2,4 +2,4 @@ // asterisk is bogus #[attr*] -mod m { } \ No newline at end of file +mod m { } diff --git a/src/test/compile-fail/bad-expr-path.rs b/src/test/compile-fail/bad-expr-path.rs index d70d935f602..34690d29a3c 100644 --- a/src/test/compile-fail/bad-expr-path.rs +++ b/src/test/compile-fail/bad-expr-path.rs @@ -2,4 +2,4 @@ mod m1 { } -fn main(args: [str]) { log(debug, m1::a); } +fn main(args: [str]/~) { log(debug, m1::a); } diff --git a/src/test/compile-fail/bad-expr-path2.rs b/src/test/compile-fail/bad-expr-path2.rs index da6cfb652b5..14bcfdf8352 100644 --- a/src/test/compile-fail/bad-expr-path2.rs +++ b/src/test/compile-fail/bad-expr-path2.rs @@ -4,4 +4,4 @@ mod m1 { mod a { } } -fn main(args: [str]) { log(debug, m1::a); } +fn main(args: [str]/~) { log(debug, m1::a); } diff --git a/src/test/compile-fail/bad-main.rs b/src/test/compile-fail/bad-main.rs index 06bcbf5d9c2..775f1137e1b 100644 --- a/src/test/compile-fail/bad-main.rs +++ b/src/test/compile-fail/bad-main.rs @@ -1,3 +1,3 @@ -// error-pattern:expecting `native fn([str]) +// error-pattern:expecting `native fn([str]/~) fn main(x: int) { } diff --git a/src/test/compile-fail/bad-module.rs b/src/test/compile-fail/bad-module.rs index 662e3108bf0..d96ea37cc78 100644 --- a/src/test/compile-fail/bad-module.rs +++ b/src/test/compile-fail/bad-module.rs @@ -1,4 +1,4 @@ // error-pattern: unresolved import import thing; -fn main() { let foo = thing::len([]); } +fn main() { let foo = thing::len([]/~); } diff --git a/src/test/compile-fail/block-arg-as-stmt-with-value.rs b/src/test/compile-fail/block-arg-as-stmt-with-value.rs index dfa39082a94..76519ff6dad 100644 --- 
a/src/test/compile-fail/block-arg-as-stmt-with-value.rs +++ b/src/test/compile-fail/block-arg-as-stmt-with-value.rs @@ -1,6 +1,6 @@ fn compute1() -> float { - let v = [0f, 1f, 2f, 3f]; + let v = [0f, 1f, 2f, 3f]/~; vec::foldl(0f, v) { |x, y| x + y } - 10f //!^ ERROR mismatched types: expected `()` diff --git a/src/test/compile-fail/block-must-not-have-result-for.rs b/src/test/compile-fail/block-must-not-have-result-for.rs index e4830ea519e..e069a356aae 100644 --- a/src/test/compile-fail/block-must-not-have-result-for.rs +++ b/src/test/compile-fail/block-must-not-have-result-for.rs @@ -1,7 +1,7 @@ // error-pattern:mismatched types: expected `()` but found `bool` fn main() { - for vec::each([0]) {|_i| + for vec::each([0]/~) {|_i| true } } \ No newline at end of file diff --git a/src/test/compile-fail/borrowck-assign-comp-idx.rs b/src/test/compile-fail/borrowck-assign-comp-idx.rs index a83f727369e..e2a7610ddc6 100644 --- a/src/test/compile-fail/borrowck-assign-comp-idx.rs +++ b/src/test/compile-fail/borrowck-assign-comp-idx.rs @@ -1,7 +1,7 @@ type point = { x: int, y: int }; fn a() { - let mut p = [mut 1]; + let mut p = [mut 1]/~; // Create an immutable pointer into p's contents: let _q: &int = &p[0]; //! NOTE loan of mutable vec content granted here @@ -15,7 +15,7 @@ fn b() { // here we alias the mutable vector into an imm slice and try to // modify the original: - let mut p = [mut 1]; + let mut p = [mut 1]/~; borrow(p) {|| //! NOTE loan of mutable vec content granted here p[0] = 5; //! ERROR assigning to mutable vec content prohibited due to outstanding loan @@ -25,7 +25,7 @@ fn b() { fn c() { // Legal because the scope of the borrow does not include the // modification: - let mut p = [mut 1]; + let mut p = [mut 1]/~; borrow(p, {||}); p[0] = 5; } diff --git a/src/test/compile-fail/borrowck-binding-mutbl.rs b/src/test/compile-fail/borrowck-binding-mutbl.rs index 183106aa28e..0da9a8a860f 100644 --- a/src/test/compile-fail/borrowck-binding-mutbl.rs +++ b/src/test/compile-fail/borrowck-binding-mutbl.rs @@ -1,8 +1,8 @@ -fn impure(_v: [int]) { +fn impure(_v: [int]/~) { } fn main() { - let x = {mut f: [3]}; + let x = {mut f: [3]/~}; alt x { {f: v} => { diff --git a/src/test/compile-fail/borrowck-loan-vec-content.rs b/src/test/compile-fail/borrowck-loan-vec-content.rs index 80e23570a0d..2e54f948de2 100644 --- a/src/test/compile-fail/borrowck-loan-vec-content.rs +++ b/src/test/compile-fail/borrowck-loan-vec-content.rs @@ -7,13 +7,13 @@ fn takes_imm_elt(_v: &int, f: fn()) { } fn has_mut_vec_and_does_not_try_to_change_it() { - let v = [mut 1, 2, 3]; + let v = [mut 1, 2, 3]/~; takes_imm_elt(&v[0]) {|| } } fn has_mut_vec_but_tries_to_change_it() { - let v = [mut 1, 2, 3]; + let v = [mut 1, 2, 3]/~; takes_imm_elt(&v[0]) {|| //! NOTE loan of mutable vec content granted here v[1] = 4; //! 
ERROR assigning to mutable vec content prohibited due to outstanding loan } @@ -24,7 +24,7 @@ fn takes_const_elt(_v: &const int, f: fn()) { } fn has_mut_vec_and_tries_to_change_it() { - let v = [mut 1, 2, 3]; + let v = [mut 1, 2, 3]/~; takes_const_elt(&const v[0]) {|| v[1] = 4; } diff --git a/src/test/compile-fail/borrowck-mut-vec-as-imm-slice-bad.rs b/src/test/compile-fail/borrowck-mut-vec-as-imm-slice-bad.rs index c14c35189b5..8735e8e48ba 100644 --- a/src/test/compile-fail/borrowck-mut-vec-as-imm-slice-bad.rs +++ b/src/test/compile-fail/borrowck-mut-vec-as-imm-slice-bad.rs @@ -4,11 +4,11 @@ fn want_slice(v: [int]/&) -> int { ret sum; } -fn has_mut_vec(+v: @[mut int]) -> int { +fn has_mut_vec(+v: @[mut int]/~) -> int { want_slice(*v) //! ERROR illegal borrow unless pure: creating immutable alias to aliasable, mutable memory //!^ NOTE impure due to access to impure function } fn main() { - assert has_mut_vec(@[mut 1, 2, 3]) == 6; + assert has_mut_vec(@[mut 1, 2, 3]/~) == 6; } \ No newline at end of file diff --git a/src/test/compile-fail/empty-vec-trailing-comma.rs b/src/test/compile-fail/empty-vec-trailing-comma.rs index e7bb6b704a1..2d01da0f485 100644 --- a/src/test/compile-fail/empty-vec-trailing-comma.rs +++ b/src/test/compile-fail/empty-vec-trailing-comma.rs @@ -1,3 +1,3 @@ fn main() { - let v = [,]; //! ERROR unexpected token: ',' + let v = [,]/~; //! ERROR unexpected token: ',' } diff --git a/src/test/compile-fail/fail-type-err.rs b/src/test/compile-fail/fail-type-err.rs index e0d3680a381..2bf07c0bf91 100644 --- a/src/test/compile-fail/fail-type-err.rs +++ b/src/test/compile-fail/fail-type-err.rs @@ -1,2 +1,2 @@ // error-pattern:expected `str` but found `[int]/~` -fn main() { fail [0i]; } +fn main() { fail [0i]/~; } diff --git a/src/test/compile-fail/import.rs b/src/test/compile-fail/import.rs index b789a25e7a7..992f4e66eb8 100644 --- a/src/test/compile-fail/import.rs +++ b/src/test/compile-fail/import.rs @@ -4,4 +4,4 @@ import zed::baz; mod zed { fn bar() { #debug("bar"); } } -fn main(args: [str]) { bar(); } +fn main(args: [str]/~) { bar(); } diff --git a/src/test/compile-fail/import2.rs b/src/test/compile-fail/import2.rs index ef81ebfcb4b..ea0c2dcd9b0 100644 --- a/src/test/compile-fail/import2.rs +++ b/src/test/compile-fail/import2.rs @@ -4,4 +4,4 @@ mod baz { } mod zed { fn bar() { #debug("bar3"); } } -fn main(args: [str]) { bar(); } +fn main(args: [str]/~) { bar(); } diff --git a/src/test/compile-fail/import3.rs b/src/test/compile-fail/import3.rs index 35a9c587373..9bb941c6daf 100644 --- a/src/test/compile-fail/import3.rs +++ b/src/test/compile-fail/import3.rs @@ -1,4 +1,4 @@ // error-pattern: unresolved modulename import main::bar; -fn main(args: [str]) { #debug("foo"); } +fn main(args: [str]/~) { #debug("foo"); } diff --git a/src/test/compile-fail/import4.rs b/src/test/compile-fail/import4.rs index aed46fb1232..ab9a9e2e544 100644 --- a/src/test/compile-fail/import4.rs +++ b/src/test/compile-fail/import4.rs @@ -3,4 +3,4 @@ mod a { import foo = b::foo; export foo; } mod b { import foo = a::foo; export foo; } -fn main(args: [str]) { #debug("loop"); } +fn main(args: [str]/~) { #debug("loop"); } diff --git a/src/test/compile-fail/infinite-vec-type-recursion.rs b/src/test/compile-fail/infinite-vec-type-recursion.rs index 709c5b628ee..35d01d0b624 100644 --- a/src/test/compile-fail/infinite-vec-type-recursion.rs +++ b/src/test/compile-fail/infinite-vec-type-recursion.rs @@ -1,6 +1,6 @@ // -*- rust -*- // error-pattern: illegal recursive type -type x = [x]; +type x = [x]/~; -fn main() 
{ let b: x = []; } +fn main() { let b: x = []/~; } diff --git a/src/test/compile-fail/issue-1655.rs b/src/test/compile-fail/issue-1655.rs index 1e0887c1645..32785c23167 100644 --- a/src/test/compile-fail/issue-1655.rs +++ b/src/test/compile-fail/issue-1655.rs @@ -7,5 +7,5 @@ mod blade_runner { was also a really good movie. Alien 3 was crap though." - )] + )]/~ } diff --git a/src/test/compile-fail/issue-2149.rs b/src/test/compile-fail/issue-2149.rs index 8bd0a2eb614..6363ca5fb6c 100644 --- a/src/test/compile-fail/issue-2149.rs +++ b/src/test/compile-fail/issue-2149.rs @@ -1,5 +1,5 @@ -impl monad for [A] { - fn bind(f: fn(A) -> [B]) { +impl monad for [A]/~ { + fn bind(f: fn(A) -> [B]/~) { let mut r = fail; for self.each {|elt| r += f(elt); } //!^ WARNING unreachable expression diff --git a/src/test/compile-fail/issue-2150.rs b/src/test/compile-fail/issue-2150.rs index 6fc8bf78af6..a4877dad5b9 100644 --- a/src/test/compile-fail/issue-2150.rs +++ b/src/test/compile-fail/issue-2150.rs @@ -1,4 +1,4 @@ -fn fail_len(v: [const int]) -> uint { +fn fail_len(v: [const int]/~) -> uint { let mut i = fail; for v.each {|x| i += 1u; } //!^ WARNING unreachable statement diff --git a/src/test/compile-fail/issue-2509-a.rs b/src/test/compile-fail/issue-2509-a.rs index 69970f8dc95..2113a1ec127 100644 --- a/src/test/compile-fail/issue-2509-a.rs +++ b/src/test/compile-fail/issue-2509-a.rs @@ -4,6 +4,6 @@ class c { //! ERROR a class must have at least one field fn main() { let a = c(); - let x = [a]; + let x = [a]/~; let _y = x[0]; } diff --git a/src/test/compile-fail/liveness-issue-2163.rs b/src/test/compile-fail/liveness-issue-2163.rs index 6045a137a9e..e5a7cd0365d 100644 --- a/src/test/compile-fail/liveness-issue-2163.rs +++ b/src/test/compile-fail/liveness-issue-2163.rs @@ -1,5 +1,5 @@ -fn main(_s: [str]) { - let a: [int] = []; +fn main(_s: [str]/~) { + let a: [int]/~ = []/~; vec::each(a) { |_x| //! ERROR not all control paths return a value } } diff --git a/src/test/compile-fail/liveness-use-in-index-lvalue.rs b/src/test/compile-fail/liveness-use-in-index-lvalue.rs index 50d0662f803..fd7ad945a57 100644 --- a/src/test/compile-fail/liveness-use-in-index-lvalue.rs +++ b/src/test/compile-fail/liveness-use-in-index-lvalue.rs @@ -1,5 +1,5 @@ fn test() { - let w: [int]; + let w: [int]/~; w[5] = 0; //! 
ERROR use of possibly uninitialized variable: `w` } diff --git a/src/test/compile-fail/lub-in-args.rs b/src/test/compile-fail/lub-in-args.rs index 8b59d12783b..1b10639445a 100644 --- a/src/test/compile-fail/lub-in-args.rs +++ b/src/test/compile-fail/lub-in-args.rs @@ -1,8 +1,8 @@ fn two_args(x: T, y: T) { } fn main() { - let x: [mut int] = [mut 3]; - let y: [int] = [3]; + let x: [mut int]/~ = [mut 3]/~; + let y: [int]/~ = [3]/~; let a: @mut int = @mut 3; let b: @int = @3; diff --git a/src/test/compile-fail/mutable-huh-variance-box.rs b/src/test/compile-fail/mutable-huh-variance-box.rs index db38c4ad289..7fbb8bca0f7 100644 --- a/src/test/compile-fail/mutable-huh-variance-box.rs +++ b/src/test/compile-fail/mutable-huh-variance-box.rs @@ -1,10 +1,10 @@ // error-pattern: mismatched types fn main() { - let v = @mut [0]; + let v = @mut [0]/~; - fn f(&&v: @mut [const int]) { - *v = [mut 3] + fn f(&&v: @mut [const int]/~) { + *v = [mut 3]/~ } f(v); diff --git a/src/test/compile-fail/mutable-huh-variance-deep.rs b/src/test/compile-fail/mutable-huh-variance-deep.rs index 7de97e59173..1d7de6b078f 100644 --- a/src/test/compile-fail/mutable-huh-variance-deep.rs +++ b/src/test/compile-fail/mutable-huh-variance-deep.rs @@ -1,9 +1,9 @@ // error-pattern: mismatched types fn main() { - let v = [mut @mut ~mut [0]]; + let v = [mut @mut ~mut [0]/~]/~; - fn f(&&v: [mut @mut ~mut [const int]]) { + fn f(&&v: [mut @mut ~mut [const int]/~]/~) { } f(v); diff --git a/src/test/compile-fail/mutable-huh-variance-ptr.rs b/src/test/compile-fail/mutable-huh-variance-ptr.rs index 852b474eebb..2b807f6cc24 100644 --- a/src/test/compile-fail/mutable-huh-variance-ptr.rs +++ b/src/test/compile-fail/mutable-huh-variance-ptr.rs @@ -3,12 +3,12 @@ use std; fn main() { - let a = [0]; - let v: *mut [int] = ptr::mut_addr_of(a); + let a = [0]/~; + let v: *mut [int]/~ = ptr::mut_addr_of(a); - fn f(&&v: *mut [const int]) { + fn f(&&v: *mut [const int]/~) { unsafe { - *v = [mut 3] + *v = [mut 3]/~ } } diff --git a/src/test/compile-fail/mutable-huh-variance-rec.rs b/src/test/compile-fail/mutable-huh-variance-rec.rs index 78143f7fbc6..326dcd2c1ae 100644 --- a/src/test/compile-fail/mutable-huh-variance-rec.rs +++ b/src/test/compile-fail/mutable-huh-variance-rec.rs @@ -1,10 +1,10 @@ // error-pattern: mismatched types fn main() { - let v = {mut g: [0]}; + let v = {mut g: [0]/~}; - fn f(&&v: {mut g: [const int]}) { - v.g = [mut 3] + fn f(&&v: {mut g: [const int]/~}) { + v.g = [mut 3]/~ } f(v); diff --git a/src/test/compile-fail/mutable-huh-variance-unique.rs b/src/test/compile-fail/mutable-huh-variance-unique.rs index cd8d2ca6f10..2cc0bcb6145 100644 --- a/src/test/compile-fail/mutable-huh-variance-unique.rs +++ b/src/test/compile-fail/mutable-huh-variance-unique.rs @@ -1,10 +1,10 @@ // error-pattern: mismatched types fn main() { - let v = ~mut [0]; + let v = ~mut [0]/~; - fn f(&&v: ~mut [const int]) { - *v = [mut 3] + fn f(&&v: ~mut [const int]/~) { + *v = [mut 3]/~ } f(v); diff --git a/src/test/compile-fail/mutable-huh-variance-vec1.rs b/src/test/compile-fail/mutable-huh-variance-vec1.rs index 3519127ab1b..ba25fbbb9d7 100644 --- a/src/test/compile-fail/mutable-huh-variance-vec1.rs +++ b/src/test/compile-fail/mutable-huh-variance-vec1.rs @@ -1,10 +1,10 @@ fn main() { // Note: explicit type annot is required here // because otherwise the inference gets smart - // and assigns a type of [mut [const int]]. - let v: [mut [int]] = [mut [0]]; + // and assigns a type of [mut [const int]/~]/~. 
+ let v: [mut [int]/~]/~ = [mut [0]/~]/~; - fn f(&&v: [mut [const int]]) { + fn f(&&v: [mut [const int]/~]/~) { v[0] = [mut 3] } diff --git a/src/test/compile-fail/mutable-huh-variance-vec2.rs b/src/test/compile-fail/mutable-huh-variance-vec2.rs index 09c3565e8b2..cab07b804e6 100644 --- a/src/test/compile-fail/mutable-huh-variance-vec2.rs +++ b/src/test/compile-fail/mutable-huh-variance-vec2.rs @@ -1,10 +1,10 @@ fn main() { // Note: explicit type annot is required here // because otherwise the inference gets smart - // and assigns a type of [mut [const int]]. - let v: [mut [mut int]] = [mut [mut 0]]; + // and assigns a type of [mut [const int]/~]/~. + let v: [mut [mut int]/~]/~ = [mut [mut 0]/~]/~; - fn f(&&v: [mut [const int]]) { + fn f(&&v: [mut [const int]/~]/~) { v[0] = [3] } diff --git a/src/test/compile-fail/mutable-huh-variance-vec3.rs b/src/test/compile-fail/mutable-huh-variance-vec3.rs index 97b6ccabdfd..afba07126cf 100644 --- a/src/test/compile-fail/mutable-huh-variance-vec3.rs +++ b/src/test/compile-fail/mutable-huh-variance-vec3.rs @@ -1,10 +1,10 @@ fn main() { // Note: explicit type annot is required here // because otherwise the inference gets smart - // and assigns a type of [mut [const int]]. + // and assigns a type of [mut [const int]/~]/~. let v: [mut[mut[int]]] = [mut [mut [0]]]; - fn f(&&v: [mut [mut [const int]]]) { + fn f(&&v: [mut [mut [const int]/~]/~]/~) { v[0][1] = [mut 3] } diff --git a/src/test/compile-fail/mutable-huh-variance-vec4.rs b/src/test/compile-fail/mutable-huh-variance-vec4.rs index 74b6ad9ff62..a7394928076 100644 --- a/src/test/compile-fail/mutable-huh-variance-vec4.rs +++ b/src/test/compile-fail/mutable-huh-variance-vec4.rs @@ -3,26 +3,26 @@ fn main() { // Note: here we do not have any type annotations // but we do express conflicting requirements: - let v = [mut [0]]; - let w = [mut [mut 0]]; - let x = [mut [mut 0]]; + let v = [mut [0]/~]/~; + let w = [mut [mut 0]/~]/~; + let x = [mut [mut 0]/~]/~; - fn f(&&v: [mut [int]]) { + fn f(&&v: [mut [int]/~]/~) { v[0] = [3] } - fn g(&&v: [const [const int]]) { + fn g(&&v: [const [const int]/~]/~) { } - fn h(&&v: [mut [mut int]]) { + fn h(&&v: [mut [mut int]/~]/~) { v[0] = [mut 3] } - fn i(&&v: [mut [const int]]) { + fn i(&&v: [mut [const int]/~]/~) { v[0] = [mut 3] } - fn j(&&v: [[const int]]) { + fn j(&&v: [[const int]/~]/~) { } f(v); @@ -38,7 +38,7 @@ fn main() { j(w); //! ERROR (values differ in mutability) // Note that without adding f() or h() to the mix, it is valid for - // x to have the type [mut [const int]], and thus we can safely + // x to have the type [mut [const int]/~]/~, and thus we can safely // call g() and i() but not j(): g(x); i(x); diff --git a/src/test/compile-fail/mutable-huh-vec-assign.rs b/src/test/compile-fail/mutable-huh-vec-assign.rs index 0f2845a8bd0..3a94c6d9b17 100644 --- a/src/test/compile-fail/mutable-huh-vec-assign.rs +++ b/src/test/compile-fail/mutable-huh-vec-assign.rs @@ -1,10 +1,10 @@ fn main() { - fn f(&&v: [const int]) { + fn f(&&v: [const int]/~) { // This shouldn't be possible v[0] = 1 //! 
ERROR assigning to const vec content } - let v = [0]; + let v = [0]/~; f(v); } diff --git a/src/test/compile-fail/nested-ty-params.rs b/src/test/compile-fail/nested-ty-params.rs index c8d08ad320d..12e2205c3b2 100644 --- a/src/test/compile-fail/nested-ty-params.rs +++ b/src/test/compile-fail/nested-ty-params.rs @@ -1,5 +1,5 @@ // error-pattern:attempt to use a type argument out of scope -fn hd(v: [U]) -> U { +fn hd(v: [U]/~) -> U { fn hd1(w: [U]) -> U { ret w[0]; } ret hd1(v); diff --git a/src/test/compile-fail/no-capture-arc.rs b/src/test/compile-fail/no-capture-arc.rs index 6c8c80dab0a..ec489529739 100644 --- a/src/test/compile-fail/no-capture-arc.rs +++ b/src/test/compile-fail/no-capture-arc.rs @@ -3,7 +3,7 @@ import comm::*; fn main() { - let v = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]; + let v = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]/~; let arc_v = arc::arc(v); task::spawn() {|| diff --git a/src/test/compile-fail/no-reuse-move-arc.rs b/src/test/compile-fail/no-reuse-move-arc.rs index fa88111d274..2369edd43dc 100644 --- a/src/test/compile-fail/no-reuse-move-arc.rs +++ b/src/test/compile-fail/no-reuse-move-arc.rs @@ -1,7 +1,7 @@ import comm::*; fn main() { - let v = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]; + let v = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]/~; let arc_v = arc::arc(v); task::spawn() {|move arc_v| //! NOTE move of variable occurred here diff --git a/src/test/compile-fail/non-const.rs b/src/test/compile-fail/non-const.rs index c438a5bafba..ab542f3bc1e 100644 --- a/src/test/compile-fail/non-const.rs +++ b/src/test/compile-fail/non-const.rs @@ -17,8 +17,8 @@ class r2 { fn main() { foo({f: 3}); foo({mut f: 3}); //! ERROR missing `const` - foo([1]); - foo([mut 1]); //! ERROR missing `const` + foo([1]/~); + foo([mut 1]/~); //! ERROR missing `const` foo(~1); foo(~mut 1); //! 
ERROR missing `const` foo(@1); diff --git a/src/test/compile-fail/non-copyable-void.rs b/src/test/compile-fail/non-copyable-void.rs index 91a625b05d0..07b93989ba7 100644 --- a/src/test/compile-fail/non-copyable-void.rs +++ b/src/test/compile-fail/non-copyable-void.rs @@ -1,5 +1,5 @@ fn main() { - let x : *[int] = ptr::addr_of([1,2,3]); + let x : *[int]/~ = ptr::addr_of([1,2,3]/~); let y : *libc::c_void = x as *libc::c_void; unsafe { let _z = *y; diff --git a/src/test/compile-fail/nonsense-constraints.rs b/src/test/compile-fail/nonsense-constraints.rs index b3ac40be24a..e05581aa10b 100644 --- a/src/test/compile-fail/nonsense-constraints.rs +++ b/src/test/compile-fail/nonsense-constraints.rs @@ -3,10 +3,10 @@ use std; import uint; -fn enum_chars(start: u8, end: u8) : uint::le(start, end) -> [char] { +fn enum_chars(start: u8, end: u8) : uint::le(start, end) -> [char]/~ { let i = start; - let r = []; - while i <= end { r += [i as char]; i += 1u as u8; } + let r = []/~; + while i <= end { r += [i as char]/~; i += 1u as u8; } ret r; } diff --git a/src/test/compile-fail/pattern-tyvar-2.rs b/src/test/compile-fail/pattern-tyvar-2.rs index 6ebc4a27626..162240ed8aa 100644 --- a/src/test/compile-fail/pattern-tyvar-2.rs +++ b/src/test/compile-fail/pattern-tyvar-2.rs @@ -6,7 +6,7 @@ import option::some; // error-pattern: mismatched types -enum bar { t1((), option<[int]>), t2, } +enum bar { t1((), option<[int]/~>), t2, } fn foo(t: bar) -> int { alt t { t1(_, some(x)) { ret x * 3; } _ { fail; } } } diff --git a/src/test/compile-fail/pattern-tyvar.rs b/src/test/compile-fail/pattern-tyvar.rs index 607e32685f3..60c555413ae 100644 --- a/src/test/compile-fail/pattern-tyvar.rs +++ b/src/test/compile-fail/pattern-tyvar.rs @@ -5,7 +5,7 @@ import option::some; // error-pattern: mismatched types -enum bar { t1((), option<[int]>), t2, } +enum bar { t1((), option<[int]/~>), t2, } fn foo(t: bar) { alt t { diff --git a/src/test/compile-fail/qquote-1.rs b/src/test/compile-fail/qquote-1.rs index 0750a7c9dfd..c88bf563e71 100644 --- a/src/test/compile-fail/qquote-1.rs +++ b/src/test/compile-fail/qquote-1.rs @@ -29,7 +29,7 @@ impl of fake_ext_ctxt for fake_session { } fn mk_ctxt() -> fake_ext_ctxt { - let opts : fake_options = {cfg: []}; + let opts : fake_options = {cfg: []/~}; {opts: @opts, parse_sess: new_parse_sess()} as fake_ext_ctxt } diff --git a/src/test/compile-fail/qquote-2.rs b/src/test/compile-fail/qquote-2.rs index a4871e5d497..af5cf1c3fab 100644 --- a/src/test/compile-fail/qquote-2.rs +++ b/src/test/compile-fail/qquote-2.rs @@ -29,7 +29,7 @@ impl of fake_ext_ctxt for fake_session { } fn mk_ctxt() -> fake_ext_ctxt { - let opts : fake_options = {cfg: []}; + let opts : fake_options = {cfg: []/~}; {opts: @opts, parse_sess: new_parse_sess()} as fake_ext_ctxt } diff --git a/src/test/compile-fail/regions-escape-loop-via-vec.rs b/src/test/compile-fail/regions-escape-loop-via-vec.rs index b44c38e68f8..309cb030233 100644 --- a/src/test/compile-fail/regions-escape-loop-via-vec.rs +++ b/src/test/compile-fail/regions-escape-loop-via-vec.rs @@ -2,10 +2,10 @@ // This generates a ton of error msgs at the moment. fn broken() -> int { let mut x = 3; - let mut y = [&mut x]; //! ERROR reference is not valid + let mut y = [&mut x]/~; //! 
ERROR reference is not valid while x < 10 { let mut z = x; - y += [&mut z]; + y += [&mut z]/~; x += 1; } vec::foldl(0, y) {|v, p| v + *p } diff --git a/src/test/compile-fail/seq-args.rs b/src/test/compile-fail/seq-args.rs index 4273675e4da..0587e21e5aa 100644 --- a/src/test/compile-fail/seq-args.rs +++ b/src/test/compile-fail/seq-args.rs @@ -2,7 +2,7 @@ use std; fn main() { iface seq { } -impl of seq for [T] { //! ERROR wrong number of type arguments +impl of seq for [T]/~ { //! ERROR wrong number of type arguments /* ... */ } impl of seq for u32 { diff --git a/src/test/compile-fail/tag-that-dare-not-speak-its-name.rs b/src/test/compile-fail/tag-that-dare-not-speak-its-name.rs index 01d428c7917..a7b0bb7175d 100644 --- a/src/test/compile-fail/tag-that-dare-not-speak-its-name.rs +++ b/src/test/compile-fail/tag-that-dare-not-speak-its-name.rs @@ -5,7 +5,7 @@ use core; -fn last(v: [const T]) -> core::option { +fn last(v: [const T]/~) -> core::option { fail; } diff --git a/src/test/compile-fail/tstate-unsat-in-called-fn-expr.rs b/src/test/compile-fail/tstate-unsat-in-called-fn-expr.rs index 010ac92f7b0..24cfa527e95 100644 --- a/src/test/compile-fail/tstate-unsat-in-called-fn-expr.rs +++ b/src/test/compile-fail/tstate-unsat-in-called-fn-expr.rs @@ -2,7 +2,7 @@ fn foo(v: [int]) : vec::is_empty(v) { #debug("%d", v[0]); } fn main() { let f = fn@() { - let v = [1]; + let v = [1]/~; foo(v); //! ERROR unsatisfied precondition constraint }(); log(error, f); diff --git a/src/test/compile-fail/tstate-unsat-in-fn-expr.rs b/src/test/compile-fail/tstate-unsat-in-fn-expr.rs index b9cd2582537..753655d354c 100644 --- a/src/test/compile-fail/tstate-unsat-in-fn-expr.rs +++ b/src/test/compile-fail/tstate-unsat-in-fn-expr.rs @@ -2,7 +2,7 @@ fn foo(v: [int]) : vec::is_empty(v) { #debug("%d", v[0]); } fn main() { let f = fn@() { - let v = [1]; + let v = [1]/~; foo(v); //! ERROR unsatisfied precondition constraint }; log(error, f()); diff --git a/src/test/compile-fail/unique-vec-res.rs b/src/test/compile-fail/unique-vec-res.rs index 92dcd538aa7..5159d0ddbfe 100644 --- a/src/test/compile-fail/unique-vec-res.rs +++ b/src/test/compile-fail/unique-vec-res.rs @@ -6,15 +6,15 @@ class r { drop { *(self.i) = *(self.i) + 1; } } -fn f(+i: [T], +j: [T]) { +fn f(+i: [T]/~, +j: [T]/~) { let k = i + j; } fn main() { let i1 = @mut 0; let i2 = @mut 1; - let r1 <- [~r(i1)]; - let r2 <- [~r(i2)]; + let r1 <- [~r(i1)]/~; + let r2 <- [~r(i2)]/~; f(r1, r2); log(debug, (r2, *i1)); log(debug, (r1, *i2)); diff --git a/src/test/compile-fail/vec-add.rs b/src/test/compile-fail/vec-add.rs index 6642112d998..583ffb9c197 100644 --- a/src/test/compile-fail/vec-add.rs +++ b/src/test/compile-fail/vec-add.rs @@ -4,19 +4,19 @@ // the right hand side in all cases. We are getting compiler errors // about this now, so I'm xfailing the test for now. -eholk -fn add(i: [int], m: [mut int], c: [const int]) { +fn add(i: [int]/~, m: [mut int]/~, c: [const int]/~) { // Check that: // (1) vectors of any two mutabilities can be added // (2) result has mutability of lhs - add(i + [3], - m + [3], - [3]); + add(i + [3]/~, + m + [3]/~, + [3]/~); - add(i + [mut 3], - m + [mut 3], - [mut 3]); + add(i + [mut 3]/~, + m + [mut 3]/~, + [mut 3]/~); add(i + i, m + i, @@ -30,33 +30,33 @@ fn add(i: [int], m: [mut int], c: [const int]) { m + c, c); - add(m + [3], //! ERROR mismatched types - m + [3], - m + [3]); + add(m + [3]/~, //! ERROR mismatched types + m + [3]/~, + m + [3]/~); - add(i + [3], - i + [3], //! 
ERROR mismatched types - i + [3]); + add(i + [3]/~, + i + [3]/~, //! ERROR mismatched types + i + [3]/~); - add(c + [3], //! ERROR mismatched types + add(c + [3]/~, //! ERROR mismatched types //!^ ERROR binary operation + cannot be applied - c + [3], //! ERROR binary operation + cannot be applied + c + [3]/~, //! ERROR binary operation + cannot be applied //!^ mismatched types - [3]); + [3]/~); - add(m + [mut 3], //! ERROR mismatched types - m + [mut 3], - m + [mut 3]); + add(m + [mut 3]/~, //! ERROR mismatched types + m + [mut 3]/~, + m + [mut 3]/~); - add(i + [mut 3], - i + [mut 3], //! ERROR mismatched types - i + [mut 3]); + add(i + [mut 3]/~, + i + [mut 3]/~, //! ERROR mismatched types + i + [mut 3]/~); - add(c + [mut 3], //! ERROR binary operation + cannot be applied + add(c + [mut 3]/~, //! ERROR binary operation + cannot be applied //!^ mismatched types - c + [mut 3], //! ERROR binary operation + cannot be applied + c + [mut 3]/~, //! ERROR binary operation + cannot be applied //!^ mismatched types - [mut 3]); + [mut 3]/~); add(m + i, //! ERROR mismatched types m + i, diff --git a/src/test/compile-fail/vec-concat-bug.rs b/src/test/compile-fail/vec-concat-bug.rs index 510bef1cae0..40e6e2b0358 100644 --- a/src/test/compile-fail/vec-concat-bug.rs +++ b/src/test/compile-fail/vec-concat-bug.rs @@ -1,8 +1,8 @@ -fn concat(v: [const [const T]]) -> [T] { - let mut r = []; +fn concat(v: [const [const T]/~]/~) -> [T]/~ { + let mut r = []/~; // Earlier versions of our type checker accepted this: - vec::iter(v) {|&&inner: [T]| + vec::iter(v) {|&&inner: [T]/~| //!^ ERROR values differ in mutability r += inner; } diff --git a/src/test/compile-fail/vec-field.rs b/src/test/compile-fail/vec-field.rs index 7eb7e118b39..936b14b6c7d 100644 --- a/src/test/compile-fail/vec-field.rs +++ b/src/test/compile-fail/vec-field.rs @@ -2,7 +2,7 @@ // issue #367 fn f() { - let v = [1i]; + let v = [1i]/~; log(debug, v.some_field_name); //type error } diff --git a/src/test/compile-fail/vec-res-add.rs b/src/test/compile-fail/vec-res-add.rs index f6388b19798..4ce5ad3cbf0 100644 --- a/src/test/compile-fail/vec-res-add.rs +++ b/src/test/compile-fail/vec-res-add.rs @@ -8,8 +8,8 @@ class r { fn main() { // This can't make sense as it would copy the classes - let i <- [r(0)]; - let j <- [r(1)]; + let i <- [r(0)]/~; + let j <- [r(1)]/~; let k = i + j; log(debug, j); } diff --git a/src/test/compile-fail/vector-no-ann.rs b/src/test/compile-fail/vector-no-ann.rs index 8d4478d949f..c68995a6854 100644 --- a/src/test/compile-fail/vector-no-ann.rs +++ b/src/test/compile-fail/vector-no-ann.rs @@ -1,3 +1,3 @@ fn main() { - let _foo = []; //! ERROR unconstrained type + let _foo = []/~; //! 
ERROR unconstrained type } diff --git a/src/test/pretty/blank-lines.rs b/src/test/pretty/blank-lines.rs index 13cf815e80d..837b85280ec 100644 --- a/src/test/pretty/blank-lines.rs +++ b/src/test/pretty/blank-lines.rs @@ -1,5 +1,5 @@ // pp-exact -fn f() -> [int] { +fn f() -> [int]/~ { let picard = 0; let data = 1; @@ -7,7 +7,7 @@ fn f() -> [int] { let worf = 2; - let enterprise = [picard, data, worf]; + let enterprise = [picard, data, worf]/~; diff --git a/src/test/pretty/block-disambig.rs b/src/test/pretty/block-disambig.rs index 4157515b41c..a6e0cc7f610 100644 --- a/src/test/pretty/block-disambig.rs +++ b/src/test/pretty/block-disambig.rs @@ -39,9 +39,9 @@ fn test8() -> int { fn test9() { let regs = @mut 0; alt check true { true { } } *regs += 1; } fn test10() -> int { - let regs = @mut [0]; + let regs = @mut [0]/~; alt check true { true { } } (*regs)[0] } -fn test11() -> [int] { if true { } [1, 2] } +fn test11() -> [int]/~ { if true { } [1, 2]/~ } diff --git a/src/test/pretty/vec-comments.pp b/src/test/pretty/vec-comments.pp index 2cd5fffdf75..5492484d0c8 100644 --- a/src/test/pretty/vec-comments.pp +++ b/src/test/pretty/vec-comments.pp @@ -9,11 +9,11 @@ fn main() { // Comment 1, // Comment - 2]; + 2]/~; let v2 = [0, // Comment 1, // Comment - 2]; // Comment + 2]/~; // Comment let v3 = [ /* Comment */ @@ -21,9 +21,9 @@ fn main() { /* Comment */ 1, /* Comment */ - 2]; + 2]/~; let v4 = [0, /* Comment */ 1, /* Comment */ - 2]; /* Comment */ + 2]/~; /* Comment */ } diff --git a/src/test/pretty/vec-comments.rs b/src/test/pretty/vec-comments.rs index 2cd5fffdf75..5492484d0c8 100644 --- a/src/test/pretty/vec-comments.rs +++ b/src/test/pretty/vec-comments.rs @@ -9,11 +9,11 @@ fn main() { // Comment 1, // Comment - 2]; + 2]/~; let v2 = [0, // Comment 1, // Comment - 2]; // Comment + 2]/~; // Comment let v3 = [ /* Comment */ @@ -21,9 +21,9 @@ fn main() { /* Comment */ 1, /* Comment */ - 2]; + 2]/~; let v4 = [0, /* Comment */ 1, /* Comment */ - 2]; /* Comment */ + 2]/~; /* Comment */ } diff --git a/src/test/pretty/vec-type.pp b/src/test/pretty/vec-type.pp index 60265eebcf7..af111a64c3f 100644 --- a/src/test/pretty/vec-type.pp +++ b/src/test/pretty/vec-type.pp @@ -1,5 +1,5 @@ // pp-exact:vec-type.pp -fn f1(x: [int]) { } +fn f1(x: [int]/~) { } -fn g1() { f1([1, 2, 3]); } +fn g1() { f1([1, 2, 3]/~); } diff --git a/src/test/pretty/vec-type.rs b/src/test/pretty/vec-type.rs index 60265eebcf7..af111a64c3f 100644 --- a/src/test/pretty/vec-type.rs +++ b/src/test/pretty/vec-type.rs @@ -1,5 +1,5 @@ // pp-exact:vec-type.pp -fn f1(x: [int]) { } +fn f1(x: [int]/~) { } -fn g1() { f1([1, 2, 3]); } +fn g1() { f1([1, 2, 3]/~); } diff --git a/src/test/run-fail/bug-2470-bounds-check-overflow-2.rs b/src/test/run-fail/bug-2470-bounds-check-overflow-2.rs index 215fd53ce5e..dcea4e893f6 100644 --- a/src/test/run-fail/bug-2470-bounds-check-overflow-2.rs +++ b/src/test/run-fail/bug-2470-bounds-check-overflow-2.rs @@ -2,7 +2,7 @@ // error-pattern:bounds check fn main() { - let x = [1u,2u,3u]; + let x = [1u,2u,3u]/~; // This should cause a bounds-check failure, but may not if we do our // bounds checking by comparing a scaled index value to the vector's diff --git a/src/test/run-fail/bug-2470-bounds-check-overflow-3.rs b/src/test/run-fail/bug-2470-bounds-check-overflow-3.rs index 26737f8a9fa..a547bc1a3f3 100644 --- a/src/test/run-fail/bug-2470-bounds-check-overflow-3.rs +++ b/src/test/run-fail/bug-2470-bounds-check-overflow-3.rs @@ -3,7 +3,7 @@ #[cfg(target_arch="x86")] fn main() { - let x = [1u,2u,3u]; + let x = [1u,2u,3u]/~; 
// This should cause a bounds-check failure, but may not if we do our // bounds checking by truncating the index value to the size of the @@ -23,6 +23,6 @@ fn main() { #[cfg(target_arch="x86_64")] fn main() { // This version just fails anyways, for symmetry on 64-bit hosts. - let x = [1u,2u,3u]; + let x = [1u,2u,3u]/~; #error("ov3 0x%x", x[200]); } diff --git a/src/test/run-fail/bug-2470-bounds-check-overflow.rs b/src/test/run-fail/bug-2470-bounds-check-overflow.rs index 84bd9ab5c76..710cecd99e2 100644 --- a/src/test/run-fail/bug-2470-bounds-check-overflow.rs +++ b/src/test/run-fail/bug-2470-bounds-check-overflow.rs @@ -8,7 +8,7 @@ fn main() { // address of the 0th cell in the array (even though the index is // huge). - let x = [1u,2u,3u]; + let x = [1u,2u,3u]/~; vec::unpack_slice(x) {|p, _len| let base = p as uint; // base = 0x1230 say let idx = base / sys::size_of::(); // idx = 0x0246 say diff --git a/src/test/run-fail/unwind-box-vec.rs b/src/test/run-fail/unwind-box-vec.rs index 7aa411ab017..616a9e75cef 100644 --- a/src/test/run-fail/unwind-box-vec.rs +++ b/src/test/run-fail/unwind-box-vec.rs @@ -5,7 +5,7 @@ fn failfn() { } fn main() { - let x = @[0, 1, 2, 3, 4, 5]; + let x = @[0, 1, 2, 3, 4, 5]/~; failfn(); log(error, x); } \ No newline at end of file diff --git a/src/test/run-fail/unwind-interleaved.rs b/src/test/run-fail/unwind-interleaved.rs index 0aacf579a6d..d914ff82219 100644 --- a/src/test/run-fail/unwind-interleaved.rs +++ b/src/test/run-fail/unwind-interleaved.rs @@ -5,8 +5,8 @@ fn a() { } fn b() { fail; } fn main() { - let x = [0]; + let x = [0]/~; a(); - let y = [0]; + let y = [0]/~; b(); } \ No newline at end of file diff --git a/src/test/run-fail/unwind-misc-1.rs b/src/test/run-fail/unwind-misc-1.rs index ad6f9feafc3..442efe3be25 100644 --- a/src/test/run-fail/unwind-misc-1.rs +++ b/src/test/run-fail/unwind-misc-1.rs @@ -7,16 +7,16 @@ import uint; fn main() { let count = @mut 0u; - fn hash(&&s: [@str]) -> uint { + fn hash(&&s: [@str]/~) -> uint { if (vec::len(s) > 0u && str::eq(*s[0], "boom")) { fail; } ret 10u; } - fn eq(&&s: [@str], &&t: [@str]) -> bool { + fn eq(&&s: [@str]/~, &&t: [@str]/~) -> bool { ret s == t; } let map = map::hashmap(hash, eq); - let mut arr = []; + let mut arr = []/~; for uint::range(0u, 10u) {|i| arr += [@"key stuff"]; map.insert(arr, arr + [@"value stuff"]); diff --git a/src/test/run-fail/unwind-partial-box.rs b/src/test/run-fail/unwind-partial-box.rs index a08bb27811b..7ff5c6ec7f3 100644 --- a/src/test/run-fail/unwind-partial-box.rs +++ b/src/test/run-fail/unwind-partial-box.rs @@ -1,6 +1,6 @@ // error-pattern:fail -fn f() -> [int] { fail; } +fn f() -> [int]/~ { fail; } // Voodoo. In unwind-alt we had to do this to trigger the bug. Might // have been to do with memory allocation patterns. diff --git a/src/test/run-fail/unwind-partial-unique.rs b/src/test/run-fail/unwind-partial-unique.rs index b7f5d1054ec..5f998e802dc 100644 --- a/src/test/run-fail/unwind-partial-unique.rs +++ b/src/test/run-fail/unwind-partial-unique.rs @@ -1,6 +1,6 @@ // error-pattern:fail -fn f() -> [int] { fail; } +fn f() -> [int]/~ { fail; } // Voodoo. In unwind-alt we had to do this to trigger the bug. Might // have been to do with memory allocation patterns. 
diff --git a/src/test/run-fail/unwind-partial-vec.rs b/src/test/run-fail/unwind-partial-vec.rs index 0e4b27cd836..feb7449c532 100644 --- a/src/test/run-fail/unwind-partial-vec.rs +++ b/src/test/run-fail/unwind-partial-vec.rs @@ -1,6 +1,6 @@ // error-pattern:fail -fn f() -> [int] { fail; } +fn f() -> [int]/~ { fail; } // Voodoo. In unwind-alt we had to do this to trigger the bug. Might // have been to do with memory allocation patterns. @@ -9,7 +9,7 @@ fn prime() { } fn partial() { - let x = [[0], f(), [0]]; + let x = [[0]/~, f(), [0]/~]/~; } fn main() { diff --git a/src/test/run-fail/unwind-rec.rs b/src/test/run-fail/unwind-rec.rs index c6dd2d0abdf..58fb32cf3e5 100644 --- a/src/test/run-fail/unwind-rec.rs +++ b/src/test/run-fail/unwind-rec.rs @@ -1,6 +1,6 @@ // error-pattern:fail -fn build() -> [int] { +fn build() -> [int]/~ { fail; } diff --git a/src/test/run-fail/unwind-rec2.rs b/src/test/run-fail/unwind-rec2.rs index 4e4afd31a84..10e9cd70323 100644 --- a/src/test/run-fail/unwind-rec2.rs +++ b/src/test/run-fail/unwind-rec2.rs @@ -1,10 +1,10 @@ // error-pattern:fail -fn build1() -> [int] { - [0,0,0,0,0,0,0] +fn build1() -> [int]/~ { + [0,0,0,0,0,0,0]/~ } -fn build2() -> [int] { +fn build2() -> [int]/~ { fail; } diff --git a/src/test/run-fail/unwind-tup.rs b/src/test/run-fail/unwind-tup.rs index 6562c5ded27..219e4d2a311 100644 --- a/src/test/run-fail/unwind-tup.rs +++ b/src/test/run-fail/unwind-tup.rs @@ -1,6 +1,6 @@ // error-pattern:fail -fn fold_local() -> @[int]{ +fn fold_local() -> @[int]/~{ fail; } diff --git a/src/test/run-fail/unwind-tup2.rs b/src/test/run-fail/unwind-tup2.rs index 0c1d0890b9d..cba38527da9 100644 --- a/src/test/run-fail/unwind-tup2.rs +++ b/src/test/run-fail/unwind-tup2.rs @@ -1,10 +1,10 @@ // error-pattern:fail -fn fold_local() -> @[int]{ - @[0,0,0,0,0,0] +fn fold_local() -> @[int]/~{ + @[0,0,0,0,0,0]/~ } -fn fold_remote() -> @[int]{ +fn fold_remote() -> @[int]/~{ fail; } diff --git a/src/test/run-fail/vec-overrun.rs b/src/test/run-fail/vec-overrun.rs index 7a429e08cae..f0d21ec7a73 100644 --- a/src/test/run-fail/vec-overrun.rs +++ b/src/test/run-fail/vec-overrun.rs @@ -2,7 +2,7 @@ // error-pattern:bounds check fn main() { - let v: [int] = [10]; + let v: [int]/~ = [10]/~; let x: int = 0; assert (v[x] == 10); // Bounds-check failure. diff --git a/src/test/run-fail/vec-underrun.rs b/src/test/run-fail/vec-underrun.rs index 9caf82d1ae0..faeeed44603 100644 --- a/src/test/run-fail/vec-underrun.rs +++ b/src/test/run-fail/vec-underrun.rs @@ -2,7 +2,7 @@ // error-pattern:bounds check fn main() { - let v: [int] = [10, 20]; + let v: [int]/~ = [10, 20]/~; let x: int = 0; assert (v[x] == 10); // Bounds-check failure. 
diff --git a/src/test/run-fail/zip-different-lengths.rs b/src/test/run-fail/zip-different-lengths.rs index 736b61cb935..f41c03e7e76 100644 --- a/src/test/run-fail/zip-different-lengths.rs +++ b/src/test/run-fail/zip-different-lengths.rs @@ -6,18 +6,18 @@ import uint; import u8; import vec::*; -fn enum_chars(start: u8, end: u8) -> [char] { +fn enum_chars(start: u8, end: u8) -> [char]/~ { assert start < end; let mut i = start; - let mut r = []; + let mut r = []/~; while i <= end { vec::push(r, i as char); i += 1u as u8; } ret r; } -fn enum_uints(start: uint, end: uint) -> [uint] { +fn enum_uints(start: uint, end: uint) -> [uint]/~ { assert start < end; let mut i = start; - let mut r = []; + let mut r = []/~; while i <= end { vec::push(r, i); i += 1u; } ret r; } diff --git a/src/test/run-pass-fulldeps/issue-1926.rs b/src/test/run-pass-fulldeps/issue-1926.rs index 383a1acec39..856e916ac0c 100644 --- a/src/test/run-pass-fulldeps/issue-1926.rs +++ b/src/test/run-pass-fulldeps/issue-1926.rs @@ -39,7 +39,7 @@ impl of fake_ext_ctxt for fake_session { } fn mk_ctxt() -> fake_ext_ctxt { - let opts : fake_options = {cfg: []}; + let opts : fake_options = {cfg: []/~}; {opts: @opts, parse_sess: new_parse_sess()} as fake_ext_ctxt } diff --git a/src/test/run-pass-fulldeps/qquote.rs b/src/test/run-pass-fulldeps/qquote.rs index 625e3be7ecd..d6da96d0393 100644 --- a/src/test/run-pass-fulldeps/qquote.rs +++ b/src/test/run-pass-fulldeps/qquote.rs @@ -19,7 +19,7 @@ iface fake_ext_ctxt { type fake_session = (); impl of fake_ext_ctxt for fake_session { - fn cfg() -> ast::crate_cfg { [] } + fn cfg() -> ast::crate_cfg { []/~ } fn parse_sess() -> parse::parse_sess { parse::new_parse_sess(none) } } diff --git a/src/test/run-pass/alloca-from-derived-tydesc.rs b/src/test/run-pass/alloca-from-derived-tydesc.rs index 1bad1418382..df4fac9945f 100644 --- a/src/test/run-pass/alloca-from-derived-tydesc.rs +++ b/src/test/run-pass/alloca-from-derived-tydesc.rs @@ -1,7 +1,7 @@ enum option { some(T), none, } -type r = {mut v: [option]}; +type r = {mut v: [option]/~}; -fn f() -> [T] { ret []; } +fn f() -> [T]/~ { ret []/~; } -fn main() { let r: r = {mut v: []}; r.v = f(); } +fn main() { let r: r = {mut v: []/~}; r.v = f(); } diff --git a/src/test/run-pass/alt-join.rs b/src/test/run-pass/alt-join.rs index 60ddb99c26a..ecc24877d2f 100644 --- a/src/test/run-pass/alt-join.rs +++ b/src/test/run-pass/alt-join.rs @@ -4,12 +4,12 @@ import option; fn foo(y: option) { let mut x: int; - let mut rs: [int] = []; + let mut rs: [int]/~ = []/~; /* tests that x doesn't get put in the precondition for the entire if expression */ if true { - } else { alt y { none:: { x = 17; } _ { x = 42; } } rs += [x]; } + } else { alt y { none:: { x = 17; } _ { x = 42; } } rs += [x]/~; } ret; } diff --git a/src/test/run-pass/argv.rs b/src/test/run-pass/argv.rs index f0a7681acfe..d49f54115ec 100644 --- a/src/test/run-pass/argv.rs +++ b/src/test/run-pass/argv.rs @@ -1,5 +1,5 @@ -fn main(args: [str]) { +fn main(args: [str]/~) { let vs: [str] = ["hi", "there", "this", "is", "a", "vec"]; - let vvs: [[str]] = [args, vs]; + let vvs: [[str]/~]/~ = [args, vs]/~; for vvs.each {|vs| for vs.each {|s| log(debug, s); } } } diff --git a/src/test/run-pass/auto-loop.rs b/src/test/run-pass/auto-loop.rs index 223aca32bcb..7a795f7cdfe 100644 --- a/src/test/run-pass/auto-loop.rs +++ b/src/test/run-pass/auto-loop.rs @@ -1,5 +1,5 @@ fn main() { let mut sum = 0; - for vec::each([1, 2, 3, 4, 5]) {|x| sum += x; } + for vec::each([1, 2, 3, 4, 5]/~) {|x| sum += x; } assert (sum == 15); } diff 
--git a/src/test/run-pass/auto_serialize.rs b/src/test/run-pass/auto_serialize.rs index deb87b813d1..6a3d54b5e35 100644 --- a/src/test/run-pass/auto_serialize.rs +++ b/src/test/run-pass/auto_serialize.rs @@ -58,7 +58,7 @@ type some_rec = {v: uint_vec}; enum an_enum = some_rec; #[auto_serialize] -type uint_vec = [uint]; +type uint_vec = [uint]/~; #[auto_serialize] type point = {x: uint, y: uint}; @@ -91,7 +91,7 @@ fn main() { deserialize_spanned_uint, serialize_spanned_uint); - test_ser_and_deser(an_enum({v: [1u, 2u, 3u]}), + test_ser_and_deser(an_enum({v: [1u, 2u, 3u]/~}), "an_enum({v: [1u, 2u, 3u]})", serialize_an_enum, deserialize_an_enum, @@ -103,7 +103,7 @@ fn main() { deserialize_point, serialize_point); - test_ser_and_deser([1u, 2u, 3u], + test_ser_and_deser([1u, 2u, 3u]/~, "[1u, 2u, 3u]", serialize_uint_vec, deserialize_uint_vec, @@ -132,4 +132,4 @@ fn main() { serialize_c_like, deserialize_c_like, serialize_c_like); -} \ No newline at end of file +} diff --git a/src/test/run-pass/autobind.rs b/src/test/run-pass/autobind.rs index 3269af16afa..e4a0db4f15d 100644 --- a/src/test/run-pass/autobind.rs +++ b/src/test/run-pass/autobind.rs @@ -1,9 +1,9 @@ fn f(x: [T]) -> T { ret x[0]; } -fn g(act: fn([int]) -> int) -> int { ret act([1, 2, 3]); } +fn g(act: fn([int]/~) -> int) -> int { ret act([1, 2, 3]/~); } fn main() { assert (g(f) == 1); - let f1: fn([str]) -> str = f; + let f1: fn([str]/~) -> str = f; assert (f1(["x", "y", "z"]) == "x"); } diff --git a/src/test/run-pass/block-arg-can-be-followed-by-binop.rs b/src/test/run-pass/block-arg-can-be-followed-by-binop.rs index a8e763e96a6..1b00df3b750 100644 --- a/src/test/run-pass/block-arg-can-be-followed-by-binop.rs +++ b/src/test/run-pass/block-arg-can-be-followed-by-binop.rs @@ -1,5 +1,5 @@ fn main() { - let v = [-1f, 0f, 1f, 2f, 3f]; + let v = [-1f, 0f, 1f, 2f, 3f]/~; // Trailing expressions require parentheses: let y = vec::foldl(0f, v) { |x, y| x + y } + 10f; diff --git a/src/test/run-pass/block-arg-can-be-followed-by-block-arg.rs b/src/test/run-pass/block-arg-can-be-followed-by-block-arg.rs index 71342eee6e8..57ced911004 100644 --- a/src/test/run-pass/block-arg-can-be-followed-by-block-arg.rs +++ b/src/test/run-pass/block-arg-can-be-followed-by-block-arg.rs @@ -1,6 +1,6 @@ fn main() { fn f(i: fn() -> uint) -> uint { i() } - let v = [-1f, 0f, 1f, 2f, 3f]; + let v = [-1f, 0f, 1f, 2f, 3f]/~; let z = vec::foldl(f, v) { |x, _y| x } { || 22u }; assert z == 22u; } diff --git a/src/test/run-pass/block-arg-can-be-followed-by-call.rs b/src/test/run-pass/block-arg-can-be-followed-by-call.rs index b570a8bdd82..31bb412de3b 100644 --- a/src/test/run-pass/block-arg-can-be-followed-by-call.rs +++ b/src/test/run-pass/block-arg-can-be-followed-by-call.rs @@ -1,6 +1,6 @@ fn main() { fn f(i: uint) -> uint { i } - let v = [-1f, 0f, 1f, 2f, 3f]; + let v = [-1f, 0f, 1f, 2f, 3f]/~; let z = vec::foldl(f, v) { |x, _y| x } (22u); assert z == 22u; } diff --git a/src/test/run-pass/block-arg-in-parentheses.rs b/src/test/run-pass/block-arg-in-parentheses.rs index 4a5a0fbea07..9d51c993898 100644 --- a/src/test/run-pass/block-arg-in-parentheses.rs +++ b/src/test/run-pass/block-arg-in-parentheses.rs @@ -1,26 +1,26 @@ -fn w_semi(v: [int]) -> int { +fn w_semi(v: [int]/~) -> int { // the semicolon causes compiler not to // complain about the ignored return value: vec::foldl(0, v) {|x,y| x+y}; -10 } -fn w_paren1(v: [int]) -> int { +fn w_paren1(v: [int]/~) -> int { (vec::foldl(0, v) {|x,y| x+y}) - 10 } -fn w_paren2(v: [int]) -> int { +fn w_paren2(v: [int]/~) -> int { 
(vec::foldl(0, v) {|x,y| x+y} - 10) } -fn w_ret(v: [int]) -> int { +fn w_ret(v: [int]/~) -> int { ret vec::foldl(0, v) {|x,y| x+y} - 10; } fn main() { - assert w_semi([0, 1, 2, 3]) == -10; - assert w_paren1([0, 1, 2, 3]) == -4; - assert w_paren2([0, 1, 2, 3]) == -4; - assert w_ret([0, 1, 2, 3]) == -4; + assert w_semi([0, 1, 2, 3]/~) == -10; + assert w_paren1([0, 1, 2, 3]/~) == -4; + assert w_paren2([0, 1, 2, 3]/~) == -4; + assert w_ret([0, 1, 2, 3]/~) == -4; } diff --git a/src/test/run-pass/block-arg.rs b/src/test/run-pass/block-arg.rs index b13b819b2dc..f33d12f4361 100644 --- a/src/test/run-pass/block-arg.rs +++ b/src/test/run-pass/block-arg.rs @@ -1,6 +1,6 @@ // Check usage and precedence of block arguments in expressions: fn main() { - let v = [-1f, 0f, 1f, 2f, 3f]; + let v = [-1f, 0f, 1f, 2f, 3f]/~; // Statement form does not require parentheses: vec::iter(v) { |i| diff --git a/src/test/run-pass/block-iter-1.rs b/src/test/run-pass/block-iter-1.rs index 8e868a18612..12b7372227a 100644 --- a/src/test/run-pass/block-iter-1.rs +++ b/src/test/run-pass/block-iter-1.rs @@ -1,7 +1,7 @@ -fn iter_vec(v: [T], f: fn(T)) { for v.each {|x| f(x); } } +fn iter_vec(v: [T]/~, f: fn(T)) { for v.each {|x| f(x); } } fn main() { - let v = [1, 2, 3, 4, 5, 6, 7]; + let v = [1, 2, 3, 4, 5, 6, 7]/~; let mut odds = 0; iter_vec(v, {|i| log(error, i); diff --git a/src/test/run-pass/block-iter-2.rs b/src/test/run-pass/block-iter-2.rs index bcb1ea76151..ac96dbed60f 100644 --- a/src/test/run-pass/block-iter-2.rs +++ b/src/test/run-pass/block-iter-2.rs @@ -1,7 +1,7 @@ -fn iter_vec(v: [T], f: fn(T)) { for v.each {|x| f(x); } } +fn iter_vec(v: [T]/~, f: fn(T)) { for v.each {|x| f(x); } } fn main() { - let v = [1, 2, 3, 4, 5]; + let v = [1, 2, 3, 4, 5]/~; let mut sum = 0; iter_vec(v, {|i| iter_vec(v, {|j| diff --git a/src/test/run-pass/block-vec-map2.rs b/src/test/run-pass/block-vec-map2.rs index 0532f013383..087268010c8 100644 --- a/src/test/run-pass/block-vec-map2.rs +++ b/src/test/run-pass/block-vec-map2.rs @@ -3,9 +3,9 @@ import vec; fn main() { let v = - vec::map2([1, 2, 3, 4, 5], - [true, false, false, true, true], + vec::map2([1, 2, 3, 4, 5]/~, + [true, false, false, true, true]/~, {|i, b| if b { -i } else { i } }); log(error, v); - assert (v == [-1, 2, 3, -4, -5]); + assert (v == [-1, 2, 3, -4, -5]/~); } diff --git a/src/test/run-pass/borrowck-mut-vec-as-imm-slice.rs b/src/test/run-pass/borrowck-mut-vec-as-imm-slice.rs index a6279d12ab8..1795d3b5aed 100644 --- a/src/test/run-pass/borrowck-mut-vec-as-imm-slice.rs +++ b/src/test/run-pass/borrowck-mut-vec-as-imm-slice.rs @@ -4,10 +4,10 @@ fn want_slice(v: [int]/&) -> int { ret sum; } -fn has_mut_vec(+v: [mut int]) -> int { +fn has_mut_vec(+v: [mut int]/~) -> int { want_slice(v) } fn main() { - assert has_mut_vec([mut 1, 2, 3]) == 6; + assert has_mut_vec([mut 1, 2, 3]/~) == 6; } \ No newline at end of file diff --git a/src/test/run-pass/break.rs b/src/test/run-pass/break.rs index 4770b9ce886..008410c22e5 100644 --- a/src/test/run-pass/break.rs +++ b/src/test/run-pass/break.rs @@ -6,7 +6,7 @@ fn main() { assert (i == 10); loop { i += 1; if i == 20 { break; } } assert (i == 20); - for vec::each([1, 2, 3, 4, 5, 6]) {|x| + for vec::each([1, 2, 3, 4, 5, 6]/~) {|x| if x == 3 { break; } assert (x <= 3); } i = 0; @@ -16,7 +16,7 @@ fn main() { i += 1; if i % 2 == 0 { cont; } assert (i % 2 != 0); if i >= 10 { break; } } - for vec::each([1, 2, 3, 4, 5, 6]) {|x| + for vec::each([1, 2, 3, 4, 5, 6]/~) {|x| if x % 2 == 0 { cont; } assert (x % 2 != 0); } diff --git 
a/src/test/run-pass/cci_iter_exe.rs b/src/test/run-pass/cci_iter_exe.rs index 27fd452c8ed..4535e0a8a78 100644 --- a/src/test/run-pass/cci_iter_exe.rs +++ b/src/test/run-pass/cci_iter_exe.rs @@ -6,7 +6,7 @@ use cci_iter_lib; fn main() { //let bt0 = sys::rusti::frame_address(1u32); //#debug["%?", bt0]; - cci_iter_lib::iter([1, 2, 3]) {|i| + cci_iter_lib::iter([1, 2, 3]/~) {|i| io::print(#fmt["%d", i]); //assert bt0 == sys::rusti::frame_address(2u32); } diff --git a/src/test/run-pass/cci_no_inline_exe.rs b/src/test/run-pass/cci_no_inline_exe.rs index f9863f12b4f..1336cd77c6b 100644 --- a/src/test/run-pass/cci_no_inline_exe.rs +++ b/src/test/run-pass/cci_no_inline_exe.rs @@ -12,7 +12,7 @@ fn main() { // actually working. //let bt0 = sys::frame_address(); //#debug["%?", bt0]; - iter([1u, 2u, 3u]) {|i| + iter([1u, 2u, 3u]/~) {|i| io::print(#fmt["%u\n", i]); //let bt1 = sys::frame_address(); diff --git a/src/test/run-pass/class-iface-bounded-param.rs b/src/test/run-pass/class-iface-bounded-param.rs index 4da75037421..e6af2bd60bd 100644 --- a/src/test/run-pass/class-iface-bounded-param.rs +++ b/src/test/run-pass/class-iface-bounded-param.rs @@ -19,5 +19,5 @@ fn main() { let m = int_hash(); m.insert(1, 2); m.insert(3, 4); - assert iter::to_vec(keys(m)) == [1, 3]; + assert iter::to_vec(keys(m)) == [1, 3]/~; } diff --git a/src/test/run-pass/class-implements-multiple-ifaces.rs b/src/test/run-pass/class-implements-multiple-ifaces.rs index fe3f160c1f8..3e1a9895665 100644 --- a/src/test/run-pass/class-implements-multiple-ifaces.rs +++ b/src/test/run-pass/class-implements-multiple-ifaces.rs @@ -18,7 +18,7 @@ iface bitey { fn bite() -> body_part; } -fn vec_includes(xs: [T], x: T) -> bool { +fn vec_includes(xs: [T]/~, x: T) -> bool { for each(xs) {|y| if y == x { ret true; }} ret false; } @@ -53,7 +53,7 @@ class cat : noisy, scratchy, bitey { let t : hashmap = hashmap::(hsher, eqer); self.bite_counts = t; - iter([finger, toe, nose, ear]) {|p| + iter([finger, toe, nose, ear]/~) {|p| self.bite_counts.insert(p, 0u); }; } @@ -61,7 +61,7 @@ class cat : noisy, scratchy, bitey { fn speak() -> int { self.meow() as int } fn meow_count() -> uint { *self.meows } fn scratch() -> option { - let all = [chair, couch, bed]; + let all = [chair, couch, bed]/~; log(error, self.scratched); let mut rslt = none; for each(all) {|thing| if !self.scratched.contains(thing) { @@ -71,7 +71,7 @@ class cat : noisy, scratchy, bitey { } fn bite() -> body_part { #error("In bite()"); - let all = [toe, nose, ear]; + let all = [toe, nose, ear]/~; let mut min = finger; iter(all) {|next| #debug("min = %?", min); @@ -92,7 +92,7 @@ fn annoy_neighbors(critter: T) { } fn bite_everything(critter: T) -> bool { - let mut left : [body_part] = [finger, toe, nose, ear]; + let mut left : [body_part]/~ = [finger, toe, nose, ear]/~; while vec::len(left) > 0u { let part = critter.bite(); #debug("%? 
%?", left, part); diff --git a/src/test/run-pass/class-poly-methods-cross-crate.rs b/src/test/run-pass/class-poly-methods-cross-crate.rs index 28ddbfdd1e2..b21f1d4f709 100644 --- a/src/test/run-pass/class-poly-methods-cross-crate.rs +++ b/src/test/run-pass/class-poly-methods-cross-crate.rs @@ -4,11 +4,11 @@ use cci_class_6; import cci_class_6::kitties::*; fn main() { - let nyan : cat = cat::(52u, 99, ['p']); + let nyan : cat = cat::(52u, 99, ['p']/~); let kitty = cat(1000u, 2, ["tabby"]); assert(nyan.how_hungry == 99); assert(kitty.how_hungry == 2); - nyan.speak([1u,2u,3u]); + nyan.speak([1u,2u,3u]/~); assert(nyan.meow_count() == 55u); kitty.speak(["meow", "mew", "purr", "chirp"]); assert(kitty.meow_count() == 1004u); diff --git a/src/test/run-pass/class-poly-methods.rs b/src/test/run-pass/class-poly-methods.rs index 0d738913ee5..0b6b3865d56 100644 --- a/src/test/run-pass/class-poly-methods.rs +++ b/src/test/run-pass/class-poly-methods.rs @@ -1,27 +1,27 @@ class cat { priv { - let mut info : [U]; + let mut info : [U]/~; let mut meows : uint; } let how_hungry : int; - new(in_x : uint, in_y : int, -in_info: [U]) + new(in_x : uint, in_y : int, -in_info: [U]/~) { self.meows = in_x; self.how_hungry = in_y; self.info <- in_info; } - fn speak(stuff: [T]) { + fn speak(stuff: [T]/~) { self.meows += stuff.len(); } fn meow_count() -> uint { self.meows } } fn main() { - let nyan : cat = cat::(52u, 99, [9]); + let nyan : cat = cat::(52u, 99, [9]/~); let kitty = cat(1000u, 2, ["tabby"]); assert(nyan.how_hungry == 99); assert(kitty.how_hungry == 2); - nyan.speak([1,2,3]); + nyan.speak([1,2,3]/~); assert(nyan.meow_count() == 55u); kitty.speak(["meow", "mew", "purr", "chirp"]); assert(kitty.meow_count() == 1004u); diff --git a/src/test/run-pass/const-bound.rs b/src/test/run-pass/const-bound.rs index ac532ea0569..f2a436e0374 100644 --- a/src/test/run-pass/const-bound.rs +++ b/src/test/run-pass/const-bound.rs @@ -7,7 +7,7 @@ fn foo(x: T) -> T { x } fn main() { foo(1); foo("hi"); - foo([1, 2, 3]); + foo([1, 2, 3]/~); foo({field: 42}); foo((1, 2u)); foo(@1); diff --git a/src/test/run-pass/cycle-collection4.rs b/src/test/run-pass/cycle-collection4.rs index ccbcdf6037b..fbe6389fc87 100644 --- a/src/test/run-pass/cycle-collection4.rs +++ b/src/test/run-pass/cycle-collection4.rs @@ -1,10 +1,10 @@ type foo = { mut z : fn@() }; fn nop() { } -fn nop_foo(_y: [int], _x : @foo) { } +fn nop_foo(_y: [int]/~, _x : @foo) { } fn main() { let w = @{ mut z: {||nop()} }; - let x = {||nop_foo([], w)}; + let x = {||nop_foo([]/~, w)}; w.z = x; } \ No newline at end of file diff --git a/src/test/run-pass/deep-vector.rs b/src/test/run-pass/deep-vector.rs index 7952bf84f77..c7bb5e3f48c 100644 --- a/src/test/run-pass/deep-vector.rs +++ b/src/test/run-pass/deep-vector.rs @@ -1998,5 +1998,5 @@ fn main() { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 - ]; + ]/~; } \ No newline at end of file diff --git a/src/test/run-pass/deep-vector2.rs b/src/test/run-pass/deep-vector2.rs index 2db5841ac1d..7a2eaa37f24 100644 --- a/src/test/run-pass/deep-vector2.rs +++ b/src/test/run-pass/deep-vector2.rs @@ -7998,5 +7998,5 @@ fn main() { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 - ]; + ]/~; } \ No newline at end of file diff --git a/src/test/run-pass/dvec-test.rs b/src/test/run-pass/dvec-test.rs index 15441ee1034..d3e2038922f 100644 --- 
a/src/test/run-pass/dvec-test.rs +++ b/src/test/run-pass/dvec-test.rs @@ -4,17 +4,17 @@ fn main() { let d = dvec(); d.push(3); d.push(4); - assert d.get() == [3, 4]; - d.set([mut 5]); + assert d.get() == [3, 4]/~; + d.set([mut 5]/~); d.push(6); d.push(7); d.push(8); d.push(9); d.push(10); - d.push_all([11, 12, 13]); - d.push_slice([11, 12, 13], 1u, 2u); + d.push_all([11, 12, 13]/~); + d.push_slice([11, 12, 13]/~, 1u, 2u); - let exp = [5, 6, 7, 8, 9, 10, 11, 12, 13, 12]; + let exp = [5, 6, 7, 8, 9, 10, 11, 12, 13, 12]/~; assert d.get() == exp; assert d.get() == exp; assert d.len() == exp.len(); diff --git a/src/test/run-pass/empty-mutable-vec.rs b/src/test/run-pass/empty-mutable-vec.rs index eaec2ab31d5..cb2c13a8844 100644 --- a/src/test/run-pass/empty-mutable-vec.rs +++ b/src/test/run-pass/empty-mutable-vec.rs @@ -1,3 +1,3 @@ -fn main() { let v: [mut int] = [mut]; } +fn main() { let v: [mut int]/~ = [mut]/~; } diff --git a/src/test/run-pass/expr-alt-fail.rs b/src/test/run-pass/expr-alt-fail.rs index 7dfffcc1120..47c2c46a532 100644 --- a/src/test/run-pass/expr-alt-fail.rs +++ b/src/test/run-pass/expr-alt-fail.rs @@ -4,7 +4,7 @@ fn test_simple() { } fn test_box() { - let r = alt true { true { [10] } false { fail } }; + let r = alt true { true { [10]/~ } false { fail } }; assert (r[0] == 10); } diff --git a/src/test/run-pass/expr-fn.rs b/src/test/run-pass/expr-fn.rs index a3bd2449bb2..a872884f95c 100644 --- a/src/test/run-pass/expr-fn.rs +++ b/src/test/run-pass/expr-fn.rs @@ -4,7 +4,7 @@ fn test_int() { } fn test_vec() { - fn f() -> [int] { [10, 11] } + fn f() -> [int]/~ { [10, 11]/~ } assert (f()[1] == 11); } diff --git a/src/test/run-pass/for-destruct.rs b/src/test/run-pass/for-destruct.rs index 71480c47a75..ef28e7255c2 100644 --- a/src/test/run-pass/for-destruct.rs +++ b/src/test/run-pass/for-destruct.rs @@ -1,5 +1,5 @@ fn main() { - for vec::each([{x: 10, y: 20}, {x: 30, y: 0}]) {|elt| + for vec::each([{x: 10, y: 20}, {x: 30, y: 0}]/~) {|elt| assert (elt.x + elt.y == 30); } } diff --git a/src/test/run-pass/for-loop-fail.rs b/src/test/run-pass/for-loop-fail.rs index 177fd1f5c54..0c72a5d05f8 100644 --- a/src/test/run-pass/for-loop-fail.rs +++ b/src/test/run-pass/for-loop-fail.rs @@ -1 +1 @@ -fn main() { let x: [int] = []; for x.each {|_i| fail "moop"; } } +fn main() { let x: [int]/~ = []/~; for x.each {|_i| fail "moop"; } } diff --git a/src/test/run-pass/foreach-nested.rs b/src/test/run-pass/foreach-nested.rs index 3cc0a5a82f1..007117b04cb 100644 --- a/src/test/run-pass/foreach-nested.rs +++ b/src/test/run-pass/foreach-nested.rs @@ -5,7 +5,7 @@ fn two(it: fn(int)) { it(0); it(1); } fn main() { - let a: [mut int] = [mut -1, -1, -1, -1]; + let a: [mut int]/~ = [mut -1, -1, -1, -1]/~; let mut p: int = 0; two {|i| two {|j| a[p] = 10 * i + j; p += 1; }; diff --git a/src/test/run-pass/generic-ivec-leak.rs b/src/test/run-pass/generic-ivec-leak.rs index 1e7de587fb9..22726635da5 100644 --- a/src/test/run-pass/generic-ivec-leak.rs +++ b/src/test/run-pass/generic-ivec-leak.rs @@ -1,4 +1,4 @@ enum wrapper { wrapped(T), } -fn main() { let w = wrapped([1, 2, 3, 4, 5]); } +fn main() { let w = wrapped([1, 2, 3, 4, 5]/~); } diff --git a/src/test/run-pass/generic-ivec.rs b/src/test/run-pass/generic-ivec.rs index 26298de5b94..c7d5828fbcd 100644 --- a/src/test/run-pass/generic-ivec.rs +++ b/src/test/run-pass/generic-ivec.rs @@ -1,3 +1,3 @@ fn f(v: @T) { } -fn main() { f(@[1, 2, 3, 4, 5]); } +fn main() { f(@[1, 2, 3, 4, 5]/~); } diff --git a/src/test/run-pass/hashmap-memory.rs 
b/src/test/run-pass/hashmap-memory.rs index 9d8e7ec116e..ccbb07fce1f 100644 --- a/src/test/run-pass/hashmap-memory.rs +++ b/src/test/run-pass/hashmap-memory.rs @@ -31,9 +31,9 @@ mod map_reduce { type mapper = native fn(str, putter); - enum ctrl_proto { find_reducer([u8], chan), mapper_done, } + enum ctrl_proto { find_reducer([u8]/~, chan), mapper_done, } - fn start_mappers(ctrl: chan, inputs: [str]) { + fn start_mappers(ctrl: chan, inputs: [str]/~) { for inputs.each {|i| task::spawn {|| map_task(ctrl, i); }; } @@ -63,7 +63,7 @@ mod map_reduce { send(ctrl, mapper_done); } - fn map_reduce(inputs: [str]) { + fn map_reduce(inputs: [str]/~) { let ctrl = port(); // This task becomes the master control task. It spawns others diff --git a/src/test/run-pass/iface-generic.rs b/src/test/run-pass/iface-generic.rs index 5ad911d553e..21345ed9850 100644 --- a/src/test/run-pass/iface-generic.rs +++ b/src/test/run-pass/iface-generic.rs @@ -12,20 +12,20 @@ impl of to_str for () { } iface map { - fn map(f: fn(T) -> U) -> [U]; + fn map(f: fn(T) -> U) -> [U]/~; } -impl of map for [T] { - fn map(f: fn(T) -> U) -> [U] { - let mut r = []; - for self.each {|x| r += [f(x)]; } +impl of map for [T]/~ { + fn map(f: fn(T) -> U) -> [U]/~ { + let mut r = []/~; + for self.each {|x| r += [f(x)]/~; } r } } -fn foo>(x: T) -> [str] { +fn foo>(x: T) -> [str]/~ { x.map({|_e| "hi" }) } -fn bar>(x: T) -> [str] { +fn bar>(x: T) -> [str]/~ { x.map({|_e| _e.to_str() }) } diff --git a/src/test/run-pass/iface-to-str.rs b/src/test/run-pass/iface-to-str.rs index cf30cefbd33..1490ec3d93f 100644 --- a/src/test/run-pass/iface-to-str.rs +++ b/src/test/run-pass/iface-to-str.rs @@ -6,7 +6,7 @@ impl of to_str for int { fn to_str() -> str { int::str(self) } } -impl of to_str for [T] { +impl of to_str for [T]/~ { fn to_str() -> str { "[" + str::connect(vec::map(self, {|e| e.to_str()}), ", ") + "]" } @@ -14,15 +14,15 @@ impl of to_str for [T] { fn main() { assert 1.to_str() == "1"; - assert [2, 3, 4].to_str() == "[2, 3, 4]"; + assert [2, 3, 4]/~.to_str() == "[2, 3, 4]"; fn indirect(x: T) -> str { x.to_str() + "!" 
} - assert indirect([10, 20]) == "[10, 20]!"; + assert indirect([10, 20]/~) == "[10, 20]!"; fn indirect2(x: T) -> str { indirect(x) } - assert indirect2([1]) == "[1]!"; + assert indirect2([1]/~) == "[1]!"; } diff --git a/src/test/run-pass/impl-variance.rs b/src/test/run-pass/impl-variance.rs index 5e0b7b26268..b78c8241efa 100644 --- a/src/test/run-pass/impl-variance.rs +++ b/src/test/run-pass/impl-variance.rs @@ -1,12 +1,12 @@ -impl extensions for [const T] { +impl extensions for [const T]/~ { fn foo() -> uint { vec::len(self) } } fn main() { - let v = [const 0]; + let v = [const 0]/~; assert v.foo() == 1u; - let v = [0]; + let v = [0]/~; assert v.foo() == 1u; - let v = [mut 0]; + let v = [mut 0]/~; assert v.foo() == 1u; } \ No newline at end of file diff --git a/src/test/run-pass/import-glob-crate.rs b/src/test/run-pass/import-glob-crate.rs index c1473b1a0d4..92519f9945f 100644 --- a/src/test/run-pass/import-glob-crate.rs +++ b/src/test/run-pass/import-glob-crate.rs @@ -4,6 +4,6 @@ import vec::*; fn main() { let mut v = from_elem(0u, 0); - v = vec::append(v, [4, 2]); - assert (reversed(v) == [2, 4]); + v = vec::append(v, [4, 2]/~); + assert (reversed(v) == [2, 4]/~); } diff --git a/src/test/run-pass/import-in-block.rs b/src/test/run-pass/import-in-block.rs index b8a74753895..33876d9800e 100644 --- a/src/test/run-pass/import-in-block.rs +++ b/src/test/run-pass/import-in-block.rs @@ -3,9 +3,9 @@ use std; fn main() { import vec; import vec::to_mut; - log(debug, vec::len(to_mut([1, 2]))); + log(debug, vec::len(to_mut([1, 2]/~))); { import vec::*; - log(debug, len([2])); + log(debug, len([2]/~)); } } diff --git a/src/test/run-pass/import4.rs b/src/test/run-pass/import4.rs index 42a0bc0fb73..a307e7ac190 100644 --- a/src/test/run-pass/import4.rs +++ b/src/test/run-pass/import4.rs @@ -5,4 +5,4 @@ mod zed { fn bar() { #debug("bar"); } } -fn main(args: [str]) { let zed = 42; bar(); } +fn main(args: [str]/~) { let zed = 42; bar(); } diff --git a/src/test/run-pass/import5.rs b/src/test/run-pass/import5.rs index f1b0c5daac9..11c5a31fdbb 100644 --- a/src/test/run-pass/import5.rs +++ b/src/test/run-pass/import5.rs @@ -7,4 +7,4 @@ mod foo { } } -fn main(args: [str]) { bar(); } +fn main(args: [str]/~) { bar(); } diff --git a/src/test/run-pass/import7.rs b/src/test/run-pass/import7.rs index d8a8e5467ea..a63673214a2 100644 --- a/src/test/run-pass/import7.rs +++ b/src/test/run-pass/import7.rs @@ -12,4 +12,4 @@ mod bar { mod zed { } } } -fn main(args: [str]) { baz(); } +fn main(args: [str]/~) { baz(); } diff --git a/src/test/run-pass/infer-fn-tail-expr.rs b/src/test/run-pass/infer-fn-tail-expr.rs index 4658ea4329c..d9fb1216a4e 100644 --- a/src/test/run-pass/infer-fn-tail-expr.rs +++ b/src/test/run-pass/infer-fn-tail-expr.rs @@ -1,5 +1,5 @@ // issue #680 -fn f() -> [int] { [] } +fn f() -> [int]/~ { []/~ } fn main() { } diff --git a/src/test/run-pass/integral-indexing.rs b/src/test/run-pass/integral-indexing.rs index 0351736b2a3..c41ff28a2d8 100644 --- a/src/test/run-pass/integral-indexing.rs +++ b/src/test/run-pass/integral-indexing.rs @@ -3,7 +3,7 @@ // This is a testcase for issue #94. 
fn main() { - let v: [int] = [0, 1, 2, 3, 4, 5]; + let v: [int]/~ = [0, 1, 2, 3, 4, 5]/~; let s: str = "abcdef"; assert (v[3u] == 3); assert (v[3u8] == 3); diff --git a/src/test/run-pass/issue-1821.rs b/src/test/run-pass/issue-1821.rs index 9bb5ed0c2ef..07100a20860 100644 --- a/src/test/run-pass/issue-1821.rs +++ b/src/test/run-pass/issue-1821.rs @@ -1,5 +1,5 @@ // Issue #1821 - Don't recurse trying to typecheck this enum t { - foo([t]) + foo([t]/~) } fn main() {} \ No newline at end of file diff --git a/src/test/run-pass/issue-1989.rs b/src/test/run-pass/issue-1989.rs index 92b7709a8a1..07d13b56edc 100644 --- a/src/test/run-pass/issue-1989.rs +++ b/src/test/run-pass/issue-1989.rs @@ -19,6 +19,6 @@ fn empty_pointy() -> @pointy { fn main() { - let v = [empty_pointy(), empty_pointy()]; + let v = [empty_pointy(), empty_pointy()]/~; v[0].a = p(v[0]); } diff --git a/src/test/run-pass/issue-2101.rs b/src/test/run-pass/issue-2101.rs index 35434e11f61..4ae5a11c566 100644 --- a/src/test/run-pass/issue-2101.rs +++ b/src/test/run-pass/issue-2101.rs @@ -9,7 +9,7 @@ fn init(ar: &a.arena::arena, str: str) -> &a.hold { new(*ar) s(str) } -fn main(args: [str]) { +fn main(args: [str]/~) { let ar = arena::arena(); let leak = init(&ar, args[0]); alt *leak { diff --git a/src/test/run-pass/issue-2502.rs b/src/test/run-pass/issue-2502.rs index 6aa35169dd5..add0c3e6dff 100644 --- a/src/test/run-pass/issue-2502.rs +++ b/src/test/run-pass/issue-2502.rs @@ -1,11 +1,11 @@ class font/& { - let fontbuf: &self.[u8]; + let fontbuf: &self.[u8]/~; - new(fontbuf: &self.[u8]) { + new(fontbuf: &self.[u8]/~) { self.fontbuf = fontbuf; } - fn buf() -> &self.[u8] { + fn buf() -> &self.[u8]/~ { self.fontbuf } } diff --git a/src/test/run-pass/issue-2611.rs b/src/test/run-pass/issue-2611.rs index ebd77749354..dc2a2f1b005 100644 --- a/src/test/run-pass/issue-2611.rs +++ b/src/test/run-pass/issue-2611.rs @@ -3,7 +3,7 @@ import iter; import iter::base_iter; impl Q for base_iter { - fn flat_map_to_vec>(op: fn(B) -> IB) -> [B] { + fn flat_map_to_vec>(op: fn(B) -> IB) -> [B]/~ { iter::flat_map_to_vec(self, op) } } diff --git a/src/test/run-pass/issue-687.rs b/src/test/run-pass/issue-687.rs index 1bb4f69fb02..a721aa0ddea 100644 --- a/src/test/run-pass/issue-687.rs +++ b/src/test/run-pass/issue-687.rs @@ -7,16 +7,16 @@ import comm::port; import comm::recv; import comm::send; -enum msg { closed, received([u8]), } +enum msg { closed, received([u8]/~), } -fn producer(c: chan<[u8]>) { - send(c, [1u8, 2u8, 3u8, 4u8]); - let empty: [u8] = []; +fn producer(c: chan<[u8]/~>) { + send(c, [1u8, 2u8, 3u8, 4u8]/~); + let empty: [u8]/~ = []/~; send(c, empty); } -fn packager(cb: chan>, msg: chan) { - let p: port<[u8]> = port(); +fn packager(cb: chan>, msg: chan) { + let p: port<[u8]/~> = port(); send(cb, chan(p)); loop { #debug("waiting for bytes"); @@ -39,11 +39,11 @@ fn packager(cb: chan>, msg: chan) { fn main() { let p: port = port(); let ch = chan(p); - let recv_reader: port> = port(); + let recv_reader: port> = port(); let recv_reader_chan = chan(recv_reader); let pack = task::spawn {|| packager(recv_reader_chan, ch); }; - let source_chan: chan<[u8]> = recv(recv_reader); + let source_chan: chan<[u8]/~> = recv(recv_reader); let prod = task::spawn {|| producer(source_chan); }; loop { diff --git a/src/test/run-pass/iter-all.rs b/src/test/run-pass/iter-all.rs index 1de45c97d19..d2625414399 100644 --- a/src/test/run-pass/iter-all.rs +++ b/src/test/run-pass/iter-all.rs @@ -1,9 +1,9 @@ fn is_even(&&x: uint) -> bool { (x % 2u) == 0u } fn main() { - assert 
![1u, 2u].all(is_even); - assert [2u, 4u].all(is_even); - assert [].all(is_even); + assert ![1u, 2u]/~.all(is_even); + assert [2u, 4u]/~.all(is_even); + assert []/~.all(is_even); assert !some(1u).all(is_even); assert some(2u).all(is_even); diff --git a/src/test/run-pass/iter-any.rs b/src/test/run-pass/iter-any.rs index 40cd8a18039..8cba862d8fa 100644 --- a/src/test/run-pass/iter-any.rs +++ b/src/test/run-pass/iter-any.rs @@ -1,9 +1,9 @@ fn is_even(&&x: uint) -> bool { (x % 2u) == 0u } fn main() { - assert ![1u, 3u].any(is_even); - assert [1u, 2u].any(is_even); - assert ![].any(is_even); + assert ![1u, 3u]/~.any(is_even); + assert [1u, 2u]/~.any(is_even); + assert ![]/~.any(is_even); assert !some(1u).any(is_even); assert some(2u).any(is_even); diff --git a/src/test/run-pass/iter-contains.rs b/src/test/run-pass/iter-contains.rs index 5af80d47a10..bd5e7a4bf52 100644 --- a/src/test/run-pass/iter-contains.rs +++ b/src/test/run-pass/iter-contains.rs @@ -1,9 +1,9 @@ fn main() { - assert [].contains(22u) == false; - assert [1u, 3u].contains(22u) == false; - assert [22u, 1u, 3u].contains(22u) == true; - assert [1u, 22u, 3u].contains(22u) == true; - assert [1u, 3u, 22u].contains(22u) == true; + assert []/~.contains(22u) == false; + assert [1u, 3u]/~.contains(22u) == false; + assert [22u, 1u, 3u]/~.contains(22u) == true; + assert [1u, 22u, 3u]/~.contains(22u) == true; + assert [1u, 3u, 22u]/~.contains(22u) == true; assert none.contains(22u) == false; assert some(1u).contains(22u) == false; assert some(22u).contains(22u) == true; diff --git a/src/test/run-pass/iter-count.rs b/src/test/run-pass/iter-count.rs index 54c9c1e2b73..3cca6d748c8 100644 --- a/src/test/run-pass/iter-count.rs +++ b/src/test/run-pass/iter-count.rs @@ -1,8 +1,8 @@ fn main() { - assert [].count(22u) == 0u; - assert [1u, 3u].count(22u) == 0u; - assert [22u, 1u, 3u].count(22u) == 1u; - assert [22u, 1u, 22u].count(22u) == 2u; + assert []/~.count(22u) == 0u; + assert [1u, 3u]/~.count(22u) == 0u; + assert [22u, 1u, 3u]/~.count(22u) == 1u; + assert [22u, 1u, 22u]/~.count(22u) == 2u; assert none.count(22u) == 0u; assert some(1u).count(22u) == 0u; assert some(22u).count(22u) == 1u; diff --git a/src/test/run-pass/iter-eachi.rs b/src/test/run-pass/iter-eachi.rs index 30efd24a55f..432a0080143 100644 --- a/src/test/run-pass/iter-eachi.rs +++ b/src/test/run-pass/iter-eachi.rs @@ -1,6 +1,6 @@ fn main() { let mut c = 0u; - for [1u, 2u, 3u, 4u, 5u].eachi { |i, v| + for [1u, 2u, 3u, 4u, 5u]/~.eachi { |i, v| assert (i + 1u) == v; c += 1u; } diff --git a/src/test/run-pass/iter-filter-to-vec.rs b/src/test/run-pass/iter-filter-to-vec.rs index 91b4eeffe7d..c16cdc69ad1 100644 --- a/src/test/run-pass/iter-filter-to-vec.rs +++ b/src/test/run-pass/iter-filter-to-vec.rs @@ -1,9 +1,9 @@ fn is_even(&&x: uint) -> bool { (x % 2u) == 0u } fn main() { - assert [1u, 3u].filter_to_vec(is_even) == []; - assert [1u, 2u, 3u].filter_to_vec(is_even) == [2u]; - assert none.filter_to_vec(is_even) == []; - assert some(1u).filter_to_vec(is_even) == []; - assert some(2u).filter_to_vec(is_even) == [2u]; + assert [1u, 3u]/~.filter_to_vec(is_even) == []/~; + assert [1u, 2u, 3u]/~.filter_to_vec(is_even) == [2u]/~; + assert none.filter_to_vec(is_even) == []/~; + assert some(1u).filter_to_vec(is_even) == []/~; + assert some(2u).filter_to_vec(is_even) == [2u]/~; } \ No newline at end of file diff --git a/src/test/run-pass/iter-flat-map-to-vec.rs b/src/test/run-pass/iter-flat-map-to-vec.rs index d4395b487f4..92fdafc7f95 100644 --- a/src/test/run-pass/iter-flat-map-to-vec.rs +++ 
b/src/test/run-pass/iter-flat-map-to-vec.rs @@ -1,21 +1,21 @@ // xfail-test -- flat_map_to_vec currently disable -fn repeat(&&x: uint) -> [uint] { [x, x] } +fn repeat(&&x: uint) -> [uint]/~ { [x, x]/~ } fn incd_if_even(&&x: uint) -> option { if (x % 2u) == 0u {some(x + 1u)} else {none} } fn main() { - assert [1u, 3u].flat_map_to_vec(repeat) == [1u, 1u, 3u, 3u]; - assert [].flat_map_to_vec(repeat) == []; - assert none.flat_map_to_vec(repeat) == []; - assert some(1u).flat_map_to_vec(repeat) == [1u, 1u]; - assert some(2u).flat_map_to_vec(repeat) == [2u, 2u]; + assert [1u, 3u]/~.flat_map_to_vec(repeat) == [1u, 1u, 3u, 3u]/~; + assert []/~.flat_map_to_vec(repeat) == []/~; + assert none.flat_map_to_vec(repeat) == []/~; + assert some(1u).flat_map_to_vec(repeat) == [1u, 1u]/~; + assert some(2u).flat_map_to_vec(repeat) == [2u, 2u]/~; - assert [1u, 2u, 5u].flat_map_to_vec(incd_if_even) == [3u]; - assert [].flat_map_to_vec(incd_if_even) == []; - assert none.flat_map_to_vec(incd_if_even) == []; - assert some(1u).flat_map_to_vec(incd_if_even) == []; - assert some(2u).flat_map_to_vec(incd_if_even) == [3u]; + assert [1u, 2u, 5u]/~.flat_map_to_vec(incd_if_even) == [3u]/~; + assert []/~.flat_map_to_vec(incd_if_even) == []/~; + assert none.flat_map_to_vec(incd_if_even) == []/~; + assert some(1u).flat_map_to_vec(incd_if_even) == []/~; + assert some(2u).flat_map_to_vec(incd_if_even) == [3u]/~; } \ No newline at end of file diff --git a/src/test/run-pass/iter-foldl.rs b/src/test/run-pass/iter-foldl.rs index 9f5fafcd5bc..d2678c5ba85 100644 --- a/src/test/run-pass/iter-foldl.rs +++ b/src/test/run-pass/iter-foldl.rs @@ -1,8 +1,8 @@ fn add(&&x: float, &&y: uint) -> float { x + (y as float) } fn main() { - assert [1u, 3u].foldl(20f, add) == 24f; - assert [].foldl(20f, add) == 20f; + assert [1u, 3u]/~.foldl(20f, add) == 24f; + assert []/~.foldl(20f, add) == 20f; assert none.foldl(20f, add) == 20f; assert some(1u).foldl(20f, add) == 21f; assert some(2u).foldl(20f, add) == 22f; diff --git a/src/test/run-pass/iter-map-to-vec.rs b/src/test/run-pass/iter-map-to-vec.rs index ea98196fbdd..68bf3248ecb 100644 --- a/src/test/run-pass/iter-map-to-vec.rs +++ b/src/test/run-pass/iter-map-to-vec.rs @@ -1,9 +1,9 @@ fn inc(&&x: uint) -> uint { x + 1u } fn main() { - assert [1u, 3u].map_to_vec(inc) == [2u, 4u]; - assert [1u, 2u, 3u].map_to_vec(inc) == [2u, 3u, 4u]; - assert none.map_to_vec(inc) == []; - assert some(1u).map_to_vec(inc) == [2u]; - assert some(2u).map_to_vec(inc) == [3u]; + assert [1u, 3u]/~.map_to_vec(inc) == [2u, 4u]/~; + assert [1u, 2u, 3u]/~.map_to_vec(inc) == [2u, 3u, 4u]/~; + assert none.map_to_vec(inc) == []/~; + assert some(1u).map_to_vec(inc) == [2u]/~; + assert some(2u).map_to_vec(inc) == [3u]/~; } \ No newline at end of file diff --git a/src/test/run-pass/iter-min-max.rs b/src/test/run-pass/iter-min-max.rs index 338ba795111..4c67670367e 100644 --- a/src/test/run-pass/iter-min-max.rs +++ b/src/test/run-pass/iter-min-max.rs @@ -1,11 +1,11 @@ fn is_even(&&x: uint) -> bool { (x % 2u) == 0u } fn main() { - assert [1u, 3u].min() == 1u; - assert [3u, 1u].min() == 1u; + assert [1u, 3u]/~.min() == 1u; + assert [3u, 1u]/~.min() == 1u; assert some(1u).min() == 1u; - assert [1u, 3u].max() == 3u; - assert [3u, 1u].max() == 3u; + assert [1u, 3u]/~.max() == 3u; + assert [3u, 1u]/~.max() == 3u; assert some(3u).max() == 3u; } \ No newline at end of file diff --git a/src/test/run-pass/iter-to-vec.rs b/src/test/run-pass/iter-to-vec.rs index 3b2f1b2ced5..c8fcb881934 100644 --- a/src/test/run-pass/iter-to-vec.rs +++ 
b/src/test/run-pass/iter-to-vec.rs @@ -1,8 +1,8 @@ fn main() { - assert [1u, 3u].to_vec() == [1u, 3u]; - let e: [uint] = []; - assert e.to_vec() == []; - assert none::.to_vec() == []; - assert some(1u).to_vec() == [1u]; - assert some(2u).to_vec() == [2u]; + assert [1u, 3u]/~.to_vec() == [1u, 3u]/~; + let e: [uint]/~ = []/~; + assert e.to_vec() == []/~; + assert none::.to_vec() == []/~; + assert some(1u).to_vec() == [1u]/~; + assert some(2u).to_vec() == [2u]/~; } \ No newline at end of file diff --git a/src/test/run-pass/ivec-add.rs b/src/test/run-pass/ivec-add.rs index b0289ceb64d..6ca1b2fb5d4 100644 --- a/src/test/run-pass/ivec-add.rs +++ b/src/test/run-pass/ivec-add.rs @@ -1,6 +1,6 @@ -fn double(a: T) -> [T] { ret [a] + [a]; } +fn double(a: T) -> [T]/~ { ret [a]/~ + [a]/~; } -fn double_int(a: int) -> [int] { ret [a] + [a]; } +fn double_int(a: int) -> [int]/~ { ret [a]/~ + [a]/~; } fn main() { let mut d = double(1); diff --git a/src/test/run-pass/ivec-pass-by-value.rs b/src/test/run-pass/ivec-pass-by-value.rs index 557c3dc9b3e..69c19f5d9b0 100644 --- a/src/test/run-pass/ivec-pass-by-value.rs +++ b/src/test/run-pass/ivec-pass-by-value.rs @@ -1,3 +1,3 @@ -fn f(a: [int]) { } -fn main() { f([1, 2, 3, 4, 5]); } +fn f(a: [int]/~) { } +fn main() { f([1, 2, 3, 4, 5]/~); } diff --git a/src/test/run-pass/ivec-tag.rs b/src/test/run-pass/ivec-tag.rs index dce947bb622..a1d0eccb50a 100644 --- a/src/test/run-pass/ivec-tag.rs +++ b/src/test/run-pass/ivec-tag.rs @@ -7,16 +7,16 @@ import comm::port; import comm::send; import comm::recv; -fn producer(c: chan<[u8]>) { +fn producer(c: chan<[u8]/~>) { send(c, [1u8, 2u8, 3u8, 4u8, 5u8, 6u8, 7u8, 8u8, 9u8, 10u8, 11u8, 12u8, - 13u8]); + 13u8]/~); } fn main() { - let p: port<[u8]> = port(); + let p: port<[u8]/~> = port(); let ch = chan(p); let prod = task::spawn {|| producer(ch); }; - let data: [u8] = recv(p); + let data: [u8]/~ = recv(p); } diff --git a/src/test/run-pass/lambda-infer-unresolved.rs b/src/test/run-pass/lambda-infer-unresolved.rs index 36b93774c86..3bf96f6dccb 100644 --- a/src/test/run-pass/lambda-infer-unresolved.rs +++ b/src/test/run-pass/lambda-infer-unresolved.rs @@ -1,7 +1,7 @@ // This should typecheck even though the type of e is not fully // resolved when we finish typechecking the fn@. 
fn main() { - let e = @{mut refs: [], n: 0}; + let e = @{mut refs: []/~, n: 0}; let f = fn@ () { log(error, e.n); }; - e.refs += [1]; + e.refs += [1]/~; } diff --git a/src/test/run-pass/linear-for-loop.rs b/src/test/run-pass/linear-for-loop.rs index f106e8c8a45..c963c781053 100644 --- a/src/test/run-pass/linear-for-loop.rs +++ b/src/test/run-pass/linear-for-loop.rs @@ -1,7 +1,7 @@ fn main() { - let x = [1, 2, 3]; + let x = [1, 2, 3]/~; let mut y = 0; for x.each {|i| log(debug, i); y += i; } log(debug, y); diff --git a/src/test/run-pass/liveness-move-in-loop.rs b/src/test/run-pass/liveness-move-in-loop.rs index a7f9547c059..edc3d97d794 100644 --- a/src/test/run-pass/liveness-move-in-loop.rs +++ b/src/test/run-pass/liveness-move-in-loop.rs @@ -1,11 +1,11 @@ fn take(-x: int) -> int {x} fn the_loop() { - let mut list = []; + let mut list = []/~; loop { let x = 5; if x > 3 { - list += [take(x)]; + list += [take(x)]/~; } else { break; } diff --git a/src/test/run-pass/log-knows-the-names-of-variants-in-std.rs b/src/test/run-pass/log-knows-the-names-of-variants-in-std.rs index 31f6b31fa37..78d912ff4c8 100644 --- a/src/test/run-pass/log-knows-the-names-of-variants-in-std.rs +++ b/src/test/run-pass/log-knows-the-names-of-variants-in-std.rs @@ -11,7 +11,7 @@ fn check_log(exp: str, v: T) { } fn main() { - let x = list::from_vec([a(22u), b("hi")]); + let x = list::from_vec([a(22u), b("hi")]/~); let exp = "@cons(a(22), @cons(b(~\"hi\"), @nil))"; assert #fmt["%?", x] == exp; check_log(exp, x); diff --git a/src/test/run-pass/log-linearized.rs b/src/test/run-pass/log-linearized.rs index 3e202e84b81..8bf9a94fa00 100644 --- a/src/test/run-pass/log-linearized.rs +++ b/src/test/run-pass/log-linearized.rs @@ -5,10 +5,10 @@ enum option { some(T), } -type smallintmap = @{mut v: [mut option]}; +type smallintmap = @{mut v: [mut option]/~}; fn mk() -> smallintmap { - let v: [mut option] = [mut]; + let v: [mut option]/~ = [mut]/~; ret @{mut v: v}; } diff --git a/src/test/run-pass/log-str.rs b/src/test/run-pass/log-str.rs index 0c32e92f71f..e9b0f1e0c6d 100644 --- a/src/test/run-pass/log-str.rs +++ b/src/test/run-pass/log-str.rs @@ -1,4 +1,4 @@ fn main() { - assert "~[1, 2, 3]" == sys::log_str([1, 2, 3]); + assert "~[1, 2, 3]" == sys::log_str([1, 2, 3]/~); assert #fmt["%?/%6?", [1, 2, 3], "hi"] == "~[1, 2, 3]/ ~\"hi\""; } diff --git a/src/test/run-pass/loop-scope.rs b/src/test/run-pass/loop-scope.rs index 3fe01a229ee..f43f8673a50 100644 --- a/src/test/run-pass/loop-scope.rs +++ b/src/test/run-pass/loop-scope.rs @@ -1,5 +1,5 @@ fn main() { - let x = [10, 20, 30]; + let x = [10, 20, 30]/~; let mut sum = 0; for x.each {|x| sum += x; } assert (sum == 60); diff --git a/src/test/run-pass/main-ivec.rs b/src/test/run-pass/main-ivec.rs index 8db23350855..b7c01eb9ad1 100644 --- a/src/test/run-pass/main-ivec.rs +++ b/src/test/run-pass/main-ivec.rs @@ -1 +1 @@ -fn main(args: [str]) { for args.each {|s| log(debug, s); } } +fn main(args: [str]/~) { for args.each {|s| log(debug, s); } } diff --git a/src/test/run-pass/maybe-mutable.rs b/src/test/run-pass/maybe-mutable.rs index 4f305ff8378..41a649052b7 100644 --- a/src/test/run-pass/maybe-mutable.rs +++ b/src/test/run-pass/maybe-mutable.rs @@ -2,15 +2,15 @@ // -*- rust -*- -fn len(v: [const int]) -> uint { +fn len(v: [const int]/~) -> uint { let mut i = 0u; while i < vec::len(v) { i += 1u; } ret i; } fn main() { - let v0 = [1, 2, 3, 4, 5]; + let v0 = [1, 2, 3, 4, 5]/~; log(debug, len(v0)); - let v1 = [mut 1, 2, 3, 4, 5]; + let v1 = [mut 1, 2, 3, 4, 5]/~; log(debug, len(v1)); } diff 
--git a/src/test/run-pass/mod-view-items.rs b/src/test/run-pass/mod-view-items.rs index 7046f1e3d78..c92821da978 100644 --- a/src/test/run-pass/mod-view-items.rs +++ b/src/test/run-pass/mod-view-items.rs @@ -7,7 +7,7 @@ mod m { use std; import vec; - fn f() -> [int] { vec::from_elem(1u, 0) } + fn f() -> [int]/~ { vec::from_elem(1u, 0) } } fn main() { let x = m::f(); } diff --git a/src/test/run-pass/monad.rs b/src/test/run-pass/monad.rs index d14de311d9e..e171cad58fb 100644 --- a/src/test/run-pass/monad.rs +++ b/src/test/run-pass/monad.rs @@ -1,6 +1,6 @@ -impl monad for [A] { - fn bind(f: fn(A) -> [B]) -> [B] { - let mut r = []; +impl monad for [A]/~ { + fn bind(f: fn(A) -> [B]/~) -> [B]/~ { + let mut r = []/~; for self.each {|elt| r += f(elt); } r } diff --git a/src/test/run-pass/morestack6.rs b/src/test/run-pass/morestack6.rs index 461be434b0a..10fe6d9a340 100644 --- a/src/test/run-pass/morestack6.rs +++ b/src/test/run-pass/morestack6.rs @@ -56,7 +56,7 @@ fn main() { calllink08, calllink09, calllink10 - ]; + ]/~; let rng = rand::rng(); for fns.each {|f| let sz = rng.next() % 256u32 + 256u32; diff --git a/src/test/run-pass/move-arg-2-unique.rs b/src/test/run-pass/move-arg-2-unique.rs index dde8e6d4d40..39a0ac1fe4a 100644 --- a/src/test/run-pass/move-arg-2-unique.rs +++ b/src/test/run-pass/move-arg-2-unique.rs @@ -1,10 +1,10 @@ fn test(-foo: ~[int]) { assert (foo[0] == 10); } fn main() { - let x = ~[10]; + let x = ~[10]/~; // Test forgetting a local by move-in test(x); // Test forgetting a temporary by move-in. - test(~[10]); + test(~[10]/~); } diff --git a/src/test/run-pass/move-arg-2.rs b/src/test/run-pass/move-arg-2.rs index 65a3913132c..d70ccc36dbe 100644 --- a/src/test/run-pass/move-arg-2.rs +++ b/src/test/run-pass/move-arg-2.rs @@ -1,10 +1,10 @@ fn test(-foo: @[int]) { assert (foo[0] == 10); } fn main() { - let x = @[10]; + let x = @[10]/~; // Test forgetting a local by move-in test(x); // Test forgetting a temporary by move-in. - test(@[10]); + test(@[10]/~); } diff --git a/src/test/run-pass/mutable-alias-vec.rs b/src/test/run-pass/mutable-alias-vec.rs index b56f80d9700..edc682b892d 100644 --- a/src/test/run-pass/mutable-alias-vec.rs +++ b/src/test/run-pass/mutable-alias-vec.rs @@ -3,10 +3,10 @@ // -*- rust -*- use std; -fn grow(&v: [int]) { v += [1]; } +fn grow(&v: [int]/~) { v += [1]/~; } fn main() { - let mut v: [int] = []; + let mut v: [int]/~ = []/~; grow(v); grow(v); grow(v); diff --git a/src/test/run-pass/mutable-huh-variance-vec1.rs b/src/test/run-pass/mutable-huh-variance-vec1.rs index 31cfab18ddc..403716321bc 100644 --- a/src/test/run-pass/mutable-huh-variance-vec1.rs +++ b/src/test/run-pass/mutable-huh-variance-vec1.rs @@ -1,12 +1,12 @@ // error-pattern: mismatched types fn main() { - let v = [[0]]; + let v = [[0]/~]/~; // This is ok because the outer vec is covariant with respect // to the inner vec. If the outer vec was mut then we // couldn't do this. - fn f(&&v: [[const int]]) { + fn f(&&v: [[const int]/~]/~) { } f(v); diff --git a/src/test/run-pass/mutable-huh-variance-vec2.rs b/src/test/run-pass/mutable-huh-variance-vec2.rs index 7815554cdab..56c8def9ac6 100644 --- a/src/test/run-pass/mutable-huh-variance-vec2.rs +++ b/src/test/run-pass/mutable-huh-variance-vec2.rs @@ -1,12 +1,12 @@ // error-pattern: mismatched types fn main() { - let v = [[0]]; + let v = [[0]/~]/~; // This is ok because the outer vec is covariant with respect // to the inner vec. If the outer vec was mut then we // couldn't do this. 
- fn f(&&v: [const [const int]]) { + fn f(&&v: [const [const int]/~]/~) { } f(v); diff --git a/src/test/run-pass/mutable-vec-drop.rs b/src/test/run-pass/mutable-vec-drop.rs index 2b1433d8a65..b7aa4278b49 100644 --- a/src/test/run-pass/mutable-vec-drop.rs +++ b/src/test/run-pass/mutable-vec-drop.rs @@ -1,6 +1,6 @@ fn main() { // This just tests whether the vec leaks its members. - let pvec: [mut @{a: int, b: int}] = - [mut @{a: 1, b: 2}, @{a: 3, b: 4}, @{a: 5, b: 6}]; + let pvec: [mut @{a: int, b: int}]/~ = + [mut @{a: 1, b: 2}, @{a: 3, b: 4}, @{a: 5, b: 6}]/~; } diff --git a/src/test/run-pass/native-fn-linkname.rs b/src/test/run-pass/native-fn-linkname.rs index 72d0ab01f96..eaa6522e2d1 100644 --- a/src/test/run-pass/native-fn-linkname.rs +++ b/src/test/run-pass/native-fn-linkname.rs @@ -12,7 +12,7 @@ native mod libc { fn strlen(str: str) -> uint unsafe { // C string is terminated with a zero - let bytes = str::bytes(str) + [0u8]; + let bytes = str::bytes(str) + [0u8]/~; ret libc::my_strlen(vec::unsafe::to_ptr(bytes)); } diff --git a/src/test/run-pass/native2.rs b/src/test/run-pass/native2.rs index 3ad9f330548..5fbef59101e 100644 --- a/src/test/run-pass/native2.rs +++ b/src/test/run-pass/native2.rs @@ -24,4 +24,4 @@ native mod libc { #[nolink] native mod baz { } -fn main(args: [str]) { } +fn main(args: [str]/~) { } diff --git a/src/test/run-pass/newtype-polymorphic.rs b/src/test/run-pass/newtype-polymorphic.rs index 69efb129146..4398f40a08a 100644 --- a/src/test/run-pass/newtype-polymorphic.rs +++ b/src/test/run-pass/newtype-polymorphic.rs @@ -1,11 +1,11 @@ -enum myvec = [X]; +enum myvec = [X]/~; -fn myvec_deref(mv: myvec) -> [X] { ret *mv; } +fn myvec_deref(mv: myvec) -> [X]/~ { ret *mv; } fn myvec_elt(mv: myvec) -> X { ret mv[0]; } fn main() { - let mv = myvec([1, 2, 3]); + let mv = myvec([1, 2, 3]/~); assert (myvec_deref(mv)[1] == 2); assert (myvec_elt(mv) == 1); assert (mv[2] == 3); diff --git a/src/test/run-pass/operator-overloading-leaks.rs b/src/test/run-pass/operator-overloading-leaks.rs index a67bffa2e92..07186db4da9 100644 --- a/src/test/run-pass/operator-overloading-leaks.rs +++ b/src/test/run-pass/operator-overloading-leaks.rs @@ -1,11 +1,11 @@ // The cases commented as "Leaks" need to not leak. 
Issue #2581 -impl methods for [T] { - fn -(x: [T]/&) -> [T] { +impl methods for [T]/~ { + fn -(x: [T]/&) -> [T]/~ { [x[0], x[0], x[0]] } - fn foo(x: [T]/&) -> [T] { + fn foo(x: [T]/&) -> [T]/~ { [x[0], x[0], x[0]] } } @@ -30,23 +30,23 @@ impl methods for @int { fn main() { // leaks - let mut bar = [1, 2, 3]; - bar -= [3, 2, 1]; - bar -= [4, 5, 6]; + let mut bar = [1, 2, 3]/~; + bar -= [3, 2, 1]/~; + bar -= [4, 5, 6]/~; io::println(#fmt("%?", bar)); // okay - let mut bar = [1, 2, 3]; - bar = bar.foo([3, 2, 1]); - bar = bar.foo([4, 5, 6]); + let mut bar = [1, 2, 3]/~; + bar = bar.foo([3, 2, 1]/~); + bar = bar.foo([4, 5, 6]/~); io::println(#fmt("%?", bar)); // okay - let mut bar = [1, 2, 3]; - bar = bar - [3, 2, 1]; - bar = bar - [4, 5, 6]; + let mut bar = [1, 2, 3]/~; + bar = bar - [3, 2, 1]/~; + bar = bar - [4, 5, 6]/~; io::println(#fmt("%?", bar)); diff --git a/src/test/run-pass/option-ext.rs b/src/test/run-pass/option-ext.rs index cb6b63059f4..cde7d73e720 100644 --- a/src/test/run-pass/option-ext.rs +++ b/src/test/run-pass/option-ext.rs @@ -1,4 +1,4 @@ -fn main(args: [str]) { +fn main(args: [str]/~) { let thing = "{{ f }}"; let f = str::find_str(thing, "{{"); diff --git a/src/test/run-pass/path.rs b/src/test/run-pass/path.rs index 4fe43d18121..96a7a7229b6 100644 --- a/src/test/run-pass/path.rs +++ b/src/test/run-pass/path.rs @@ -4,4 +4,4 @@ mod foo { fn bar(offset: uint) { } } -fn main(args: [str]) { foo::bar(0u); } +fn main(args: [str]/~) { foo::bar(0u); } diff --git a/src/test/run-pass/pure-sum.rs b/src/test/run-pass/pure-sum.rs index f5bc71d94b0..823f6751c23 100644 --- a/src/test/run-pass/pure-sum.rs +++ b/src/test/run-pass/pure-sum.rs @@ -1,6 +1,6 @@ // Check that pure functions can modify local state. -pure fn sums_to(v: [int], sum: int) -> bool { +pure fn sums_to(v: [int]/~, sum: int) -> bool { let mut i = 0u, sum0 = 0; while i < v.len() { sum0 += v[i]; @@ -9,7 +9,7 @@ pure fn sums_to(v: [int], sum: int) -> bool { ret sum0 == sum; } -pure fn sums_to_using_uniq(v: [int], sum: int) -> bool { +pure fn sums_to_using_uniq(v: [int]/~, sum: int) -> bool { let mut i = 0u, sum0 = ~mut 0; while i < v.len() { *sum0 += v[i]; @@ -18,7 +18,7 @@ pure fn sums_to_using_uniq(v: [int], sum: int) -> bool { ret *sum0 == sum; } -pure fn sums_to_using_rec(v: [int], sum: int) -> bool { +pure fn sums_to_using_rec(v: [int]/~, sum: int) -> bool { let mut i = 0u, sum0 = {f: 0}; while i < v.len() { sum0.f += v[i]; @@ -27,7 +27,7 @@ pure fn sums_to_using_rec(v: [int], sum: int) -> bool { ret sum0.f == sum; } -pure fn sums_to_using_uniq_rec(v: [int], sum: int) -> bool { +pure fn sums_to_using_uniq_rec(v: [int]/~, sum: int) -> bool { let mut i = 0u, sum0 = {f: ~mut 0}; while i < v.len() { *sum0.f += v[i]; diff --git a/src/test/run-pass/rcvr-borrowed-to-slice.rs b/src/test/run-pass/rcvr-borrowed-to-slice.rs index 38f9f685050..6718de9d9d6 100644 --- a/src/test/run-pass/rcvr-borrowed-to-slice.rs +++ b/src/test/run-pass/rcvr-borrowed-to-slice.rs @@ -10,17 +10,17 @@ impl foo/& for [int]/& { fn call_sum(x: [int]/&) -> int { x.sum() } fn main() { - let x = [1, 2, 3]; + let x = [1, 2, 3]/~; let y = call_sum(x); #debug["y==%d", y]; assert y == 6; - let x = [mut 1, 2, 3]; + let x = [mut 1, 2, 3]/~; let y = x.sum(); #debug["y==%d", y]; assert y == 6; - let x = [1, 2, 3]; + let x = [1, 2, 3]/~; let y = x.sum(); #debug["y==%d", y]; assert y == 6; diff --git a/src/test/run-pass/reflect-visit-data.rs b/src/test/run-pass/reflect-visit-data.rs index 391acdaa9a2..d0818bda7c4 100644 --- a/src/test/run-pass/reflect-visit-data.rs +++ 
b/src/test/run-pass/reflect-visit-data.rs @@ -250,14 +250,14 @@ impl ptr_visitor } fn visit_enter_vec(mtbl: uint) -> bool { - self.align_to::<[u8]>(); + self.align_to::<[u8]/~>(); if ! self.inner.visit_enter_vec(mtbl) { ret false; } true } fn visit_leave_vec(mtbl: uint) -> bool { if ! self.inner.visit_leave_vec(mtbl) { ret false; } - self.bump_past::<[u8]>(); + self.bump_past::<[u8]/~>(); true } @@ -547,7 +547,7 @@ impl ptr_visitor enum my_visitor = @{ mut ptr1: *c_void, mut ptr2: *c_void, - mut vals: [str] + mut vals: [str]/~ }; impl extra_methods for my_visitor { @@ -572,7 +572,7 @@ impl of ty_visitor for my_visitor { fn visit_bool() -> bool { /* self.get::() {|b| - self.vals += [bool::to_str(b)]; + self.vals += [bool::to_str(b)]/~; } */ true @@ -580,7 +580,7 @@ impl of ty_visitor for my_visitor { fn visit_int() -> bool { /* self.get::() {|i| - self.vals += [int::to_str(i, 10u)]; + self.vals += [int::to_str(i, 10u)]/~; } */ true @@ -699,7 +699,7 @@ fn main() { let p = ptr::addr_of(r) as *c_void; let u = my_visitor(@{mut ptr1: p, mut ptr2: p, - mut vals: []}); + mut vals: []/~}); let v = ptr_visit_adaptor({inner: u}); let vv = v as intrinsic::ty_visitor; intrinsic::visit_ty::<(int,int,int,bool,bool)>(vv); diff --git a/src/test/run-pass/reflect-visit-type.rs b/src/test/run-pass/reflect-visit-type.rs index 8e90e438068..33f5115df5f 100644 --- a/src/test/run-pass/reflect-visit-type.rs +++ b/src/test/run-pass/reflect-visit-type.rs @@ -1,7 +1,7 @@ // FIXME: un-xfail after snapshot // xfail-test -enum my_visitor = @{ mut types: [str] }; +enum my_visitor = @{ mut types: [str]/~ }; impl of intrinsic::ty_visitor for my_visitor { fn visit_bot() -> bool { @@ -139,14 +139,14 @@ impl of intrinsic::ty_visitor for my_visitor { } fn main() { - let v = my_visitor(@{mut types: []}); + let v = my_visitor(@{mut types: []/~}); let vv = v as intrinsic::ty_visitor; intrinsic::visit_ty::(vv); intrinsic::visit_ty::(vv); intrinsic::visit_ty::(vv); intrinsic::visit_ty::(vv); - intrinsic::visit_ty::<[int]>(vv); + intrinsic::visit_ty::<[int]/~>(vv); for (copy v.types).each {|s| io::println(#fmt("type: %s", s)); diff --git a/src/test/run-pass/regions-borrow-evec-uniq.rs b/src/test/run-pass/regions-borrow-evec-uniq.rs index 3cfcd015dde..8ee9dc7f958 100644 --- a/src/test/run-pass/regions-borrow-evec-uniq.rs +++ b/src/test/run-pass/regions-borrow-evec-uniq.rs @@ -7,7 +7,7 @@ fn main() { let r = foo(p); assert r == 1; - let p = [5,4,3,2,1]; + let p = [5,4,3,2,1]/~; let r = foo(p); assert r == 5; } diff --git a/src/test/run-pass/ret-break-cont-in-block.rs b/src/test/run-pass/ret-break-cont-in-block.rs index e93ce8a32e9..94d78b5f556 100644 --- a/src/test/run-pass/ret-break-cont-in-block.rs +++ b/src/test/run-pass/ret-break-cont-in-block.rs @@ -1,4 +1,4 @@ -fn iter(v: [T], it: fn(T) -> bool) { +fn iter(v: [T]/~, it: fn(T) -> bool) { let mut i = 0u, l = v.len(); while i < l { if !it(v[i]) { break; } @@ -6,7 +6,7 @@ fn iter(v: [T], it: fn(T) -> bool) { } } -fn find_pos(n: T, h: [T]) -> option { +fn find_pos(n: T, h: [T]/~) -> option { let mut i = 0u; for iter(h) {|e| if e == n { ret some(i); } @@ -15,7 +15,7 @@ fn find_pos(n: T, h: [T]) -> option { none } -fn bail_deep(x: [[bool]]) { +fn bail_deep(x: [[bool]/~]/~) { let mut seen = false; for iter(x) {|x| for iter(x) {|x| @@ -27,8 +27,8 @@ fn bail_deep(x: [[bool]]) { } fn ret_deep() -> str { - for iter([1, 2]) {|e| - for iter([3, 4]) {|x| + for iter([1, 2]/~) {|e| + for iter([3, 4]/~) {|x| if e + x > 4 { ret "hi"; } } } @@ -37,7 +37,7 @@ fn ret_deep() -> str { fn main() { let 
 mut last = 0;
-    for vec::all([1, 2, 3, 4, 5, 6, 7]) {|e|
+    for vec::all([1, 2, 3, 4, 5, 6, 7]/~) {|e|
         last = e;
         if e == 5 { break; }
         if e % 2 == 1 { cont; }
@@ -45,13 +45,13 @@ fn main() {
     };
     assert last == 5;
-    assert find_pos(1, [0, 1, 2, 3]) == some(1u);
-    assert find_pos(1, [0, 4, 2, 3]) == none;
+    assert find_pos(1, [0, 1, 2, 3]/~) == some(1u);
+    assert find_pos(1, [0, 4, 2, 3]/~) == none;
     assert find_pos("hi", ["foo", "bar", "baz", "hi"]) == some(3u);
-    bail_deep([[false, false], [true, true], [false, true]]);
-    bail_deep([[true]]);
-    bail_deep([[false, false, false]]);
+    bail_deep([[false, false]/~, [true, true]/~, [false, true]/~]/~);
+    bail_deep([[true]/~]/~);
+    bail_deep([[false, false, false]/~]/~);
     assert ret_deep() == "hi";
 }
diff --git a/src/test/run-pass/sendfn-deep-copy.rs b/src/test/run-pass/sendfn-deep-copy.rs
index 96e06e65c70..a5fd483e3f8 100644
--- a/src/test/run-pass/sendfn-deep-copy.rs
+++ b/src/test/run-pass/sendfn-deep-copy.rs
@@ -8,7 +8,7 @@ fn main() { test05(); }
 fn mk_counter() -> fn~(A) -> (A,uint) {
     // The only reason that the counter is generic is so that it closes
     // over both a type descriptor and some data.
-    let v = [mut 0u];
+    let v = [mut 0u]/~;
     ret fn~(a: A) -> (A,uint) {
         let n = v[0];
         v[0] = n + 1u;
diff --git a/src/test/run-pass/seq-compare.rs b/src/test/run-pass/seq-compare.rs
index 79a318eefd2..423d755076f 100644
--- a/src/test/run-pass/seq-compare.rs
+++ b/src/test/run-pass/seq-compare.rs
@@ -4,13 +4,13 @@ fn main() {
     assert ("hello" < "hellr");
     assert ("hello " > "hello");
     assert ("hello" != "there");
-    assert ([1, 2, 3, 4] > [1, 2, 3]);
-    assert ([1, 2, 3] < [1, 2, 3, 4]);
-    assert ([1, 2, 4, 4] > [1, 2, 3, 4]);
-    assert ([1, 2, 3, 4] < [1, 2, 4, 4]);
-    assert ([1, 2, 3] <= [1, 2, 3]);
-    assert ([1, 2, 3] <= [1, 2, 3, 3]);
-    assert ([1, 2, 3, 4] > [1, 2, 3]);
-    assert ([1, 2, 3] == [1, 2, 3]);
-    assert ([1, 2, 3] != [1, 1, 3]);
+    assert ([1, 2, 3, 4]/~ > [1, 2, 3]/~);
+    assert ([1, 2, 3]/~ < [1, 2, 3, 4]/~);
+    assert ([1, 2, 4, 4]/~ > [1, 2, 3, 4]/~);
+    assert ([1, 2, 3, 4]/~ < [1, 2, 4, 4]/~);
+    assert ([1, 2, 3]/~ <= [1, 2, 3]/~);
+    assert ([1, 2, 3]/~ <= [1, 2, 3, 3]/~);
+    assert ([1, 2, 3, 4]/~ > [1, 2, 3]/~);
+    assert ([1, 2, 3]/~ == [1, 2, 3]/~);
+    assert ([1, 2, 3]/~ != [1, 1, 3]/~);
 }
diff --git a/src/test/run-pass/shadow.rs b/src/test/run-pass/shadow.rs
index 5ec60b38c10..11ee08d3c2e 100644
--- a/src/test/run-pass/shadow.rs
+++ b/src/test/run-pass/shadow.rs
@@ -1,7 +1,7 @@
 // -*- rust -*-
-fn foo(c: [int]) {
+fn foo(c: [int]/~) {
     let a: int = 5;
-    let mut b: [int] = [];
+    let mut b: [int]/~ = []/~;
     alt none:: {
@@ -9,7 +9,7 @@ fn foo(c: [int]) {
         for c.each {|i|
             log(debug, a);
             let a = 17;
-            b += [a];
+            b += [a]/~;
         }
       }
       _ { }
@@ -18,4 +18,4 @@ fn foo(c: [int]) {
 enum t { none, some(T), }
-fn main() { let x = 10; let x = x + 20; assert (x == 30); foo([]); }
+fn main() { let x = 10; let x = x + 20; assert (x == 30); foo([]/~); }
diff --git a/src/test/run-pass/shape_intrinsic_tag_then_rec.rs b/src/test/run-pass/shape_intrinsic_tag_then_rec.rs
index 65b793564ad..3a40b378f14 100644
--- a/src/test/run-pass/shape_intrinsic_tag_then_rec.rs
+++ b/src/test/run-pass/shape_intrinsic_tag_then_rec.rs
@@ -16,7 +16,7 @@ enum opt_span {
 type span = {lo: uint, hi: uint, expanded_from: opt_span};
 type spanned = { data: T, span: span };
 type ty_ = uint;
-type path_ = { global: bool, idents: [str], types: [@ty] };
+type path_ = { global: bool, idents: [str]/~, types: [@ty]/~ };
 type path = spanned;
 type ty = spanned;
diff --git a/src/test/run-pass/size-and-align.rs b/src/test/run-pass/size-and-align.rs
index ce70efa318e..3617546d0fc 100644
--- a/src/test/run-pass/size-and-align.rs
+++ b/src/test/run-pass/size-and-align.rs
@@ -4,7 +4,7 @@
 // -*- rust -*-
 enum clam { a(T, int), b, }
-fn uhoh(v: [clam]) {
+fn uhoh(v: [clam]/~) {
     alt v[1] {
       a::(t, u) { #debug("incorrect"); log(debug, u); fail; }
       b:: { #debug("correct"); }
@@ -12,6 +12,6 @@ fn uhoh(v: [clam]) {
 }
 fn main() {
-    let v: [clam] = [b::, b::, a::(42, 17)];
+    let v: [clam]/~ = [b::, b::, a::(42, 17)]/~;
     uhoh::(v);
 }
diff --git a/src/test/run-pass/static-impl.rs b/src/test/run-pass/static-impl.rs
index 10a67b9217c..edceedae71d 100644
--- a/src/test/run-pass/static-impl.rs
+++ b/src/test/run-pass/static-impl.rs
@@ -17,12 +17,12 @@ impl util for uint {
     }
 }
-impl util for [T] {
+impl util for [T]/~ {
     fn length_() -> uint { vec::len(self) }
     fn iter_(f: fn(T)) { for self.each {|x| f(x); } }
-    fn map_(f: fn(T) -> U) -> [U] {
-        let mut r = [];
-        for self.each {|elt| r += [f(elt)]; }
+    fn map_(f: fn(T) -> U) -> [U]/~ {
+        let mut r = []/~;
+        for self.each {|elt| r += [f(elt)]/~; }
         r
     }
 }
@@ -33,9 +33,9 @@ fn main() {
     assert 10u.plus() == 30;
     assert "hi".plus() == 200;
-    assert [1].length_().str() == "1";
-    assert [3, 4].map_({|a| a + 4})[0] == 7;
-    assert [3, 4].map_::({|a| a as uint + 4u})[0] == 7u;
+    assert [1]/~.length_().str() == "1";
+    assert [3, 4]/~.map_({|a| a + 4})[0] == 7;
+    assert [3, 4]/~.map_::({|a| a as uint + 4u})[0] == 7u;
     let mut x = 0u;
     10u.times {|_n| x += 2u;}
     assert x == 20u;
diff --git a/src/test/run-pass/swap-2.rs b/src/test/run-pass/swap-2.rs
index f94205f4a9f..a70ffba452e 100644
--- a/src/test/run-pass/swap-2.rs
+++ b/src/test/run-pass/swap-2.rs
@@ -1,7 +1,7 @@
 fn swap(v: [mut T], i: int, j: int) { v[i] <-> v[j]; }
 fn main() {
-    let a: [mut int] = [mut 0, 1, 2, 3, 4, 5, 6];
+    let a: [mut int]/~ = [mut 0, 1, 2, 3, 4, 5, 6]/~;
     swap(a, 2, 4);
     assert (a[2] == 4);
     assert (a[4] == 2);
diff --git a/src/test/run-pass/tag-in-block.rs b/src/test/run-pass/tag-in-block.rs
index c08d440874d..5d3088cc8ae 100644
--- a/src/test/run-pass/tag-in-block.rs
+++ b/src/test/run-pass/tag-in-block.rs
@@ -6,4 +6,4 @@ fn foo() {
     fn baz() { zed(nil); }
 }
-fn main(args: [str]) { }
+fn main(args: [str]/~) { }
diff --git a/src/test/run-pass/task-comm-16.rs b/src/test/run-pass/task-comm-16.rs
index d250b311ccc..5c7b8c7b282 100644
--- a/src/test/run-pass/task-comm-16.rs
+++ b/src/test/run-pass/task-comm-16.rs
@@ -25,7 +25,7 @@ fn test_rec() {
 fn test_vec() {
     let po = port();
     let ch = chan(po);
-    let v0: [int] = [0, 1, 2];
+    let v0: [int]/~ = [0, 1, 2]/~;
     send(ch, v0);
     let v1 = recv(po);
     assert (v1[0] == 0);
diff --git a/src/test/run-pass/task-comm-3.rs b/src/test/run-pass/task-comm-3.rs
index 8364c3a3fd0..17cd1a1ce93 100644
--- a/src/test/run-pass/task-comm-3.rs
+++ b/src/test/run-pass/task-comm-3.rs
@@ -30,10 +30,10 @@ fn test00() {
     let mut i: int = 0;
     // Create and spawn tasks...
-    let mut results = [];
+    let mut results = []/~;
     while i < number_of_tasks {
         let builder = task::builder();
-        results += [task::future_result(builder)];
+        results += [task::future_result(builder)]/~;
         task::run(builder) {|copy i|
             test00_start(ch, i, number_of_messages)
         }
diff --git a/src/test/run-pass/task-comm.rs b/src/test/run-pass/task-comm.rs
index 9118075c642..97c2a8643ad 100644
--- a/src/test/run-pass/task-comm.rs
+++ b/src/test/run-pass/task-comm.rs
@@ -38,11 +38,11 @@ fn test00() {
     let mut i: int = 0;
-    let mut results = [];
+    let mut results = []/~;
     while i < number_of_tasks {
         i = i + 1;
         let builder = task::builder();
-        results += [task::future_result(builder)];
+        results += [task::future_result(builder)]/~;
         task::run(builder) {|copy i|
             test00_start(ch, i, number_of_messages);
         }
@@ -125,11 +125,11 @@ fn test06() {
     let mut i: int = 0;
-    let mut results = [];
+    let mut results = []/~;
     while i < number_of_tasks {
         i = i + 1;
         let builder = task::builder();
-        results += [task::future_result(builder)];
+        results += [task::future_result(builder)]/~;
         task::run(builder) {|copy i|
             test06_start(i);
         };
diff --git a/src/test/run-pass/task-killjoin-rsrc.rs b/src/test/run-pass/task-killjoin-rsrc.rs
index 0696d176147..6dd5827836e 100644
--- a/src/test/run-pass/task-killjoin-rsrc.rs
+++ b/src/test/run-pass/task-killjoin-rsrc.rs
@@ -14,7 +14,7 @@ class notify {
                 task::get_task(),
                 ptr::addr_of(*(self.v)) as uint,
                 task::failing(),
-                *(self.v)];
+                *(self.v)]/~;
         let b = *(self.v);
         comm::send(self.ch, b);
     }
@@ -26,7 +26,7 @@ fn joinable(f: fn~()) -> comm::port {
     let b = @mut false;
     #error["wrapper: task=%? allocated v=%x",
            task::get_task(),
-           ptr::addr_of(*b) as uint];
+           ptr::addr_of(*b) as uint]/~;
     let _r = notify(c, b);
     f();
     *b = true;
diff --git a/src/test/run-pass/type-param.rs b/src/test/run-pass/type-param.rs
index 8d4bccc11ff..68be7c56d4b 100644
--- a/src/test/run-pass/type-param.rs
+++ b/src/test/run-pass/type-param.rs
@@ -2,4 +2,4 @@
 type lteq = native fn(T) -> bool;
-fn main(args: [str]) { }
+fn main(args: [str]/~) { }
diff --git a/src/test/run-pass/type-params-in-for-each.rs b/src/test/run-pass/type-params-in-for-each.rs
index 3e84e341733..9173e258003 100644
--- a/src/test/run-pass/type-params-in-for-each.rs
+++ b/src/test/run-pass/type-params-in-for-each.rs
@@ -5,8 +5,8 @@ fn range(lo: uint, hi: uint, it: fn(uint)) {
     while lo_ < hi { it(lo_); lo_ += 1u; }
 }
-fn create_index(index: [{a: T, b: uint}], hash_fn: native fn(T) -> uint) {
-    range(0u, 256u) {|_i| let bucket: [T] = []; }
+fn create_index(index: [{a: T, b: uint}]/~, hash_fn: native fn(T) -> uint) {
+    range(0u, 256u) {|_i| let bucket: [T]/~ = []/~; }
 }
 fn main() { }
diff --git a/src/test/run-pass/type-ptr.rs b/src/test/run-pass/type-ptr.rs
index e608a9f9836..9569954ebe6 100644
--- a/src/test/run-pass/type-ptr.rs
+++ b/src/test/run-pass/type-ptr.rs
@@ -2,4 +2,4 @@ fn f(a: *int) -> *int { ret a; }
 fn g(a: *int) -> *int { let b = f(a); ret b; }
-fn main(args: [str]) { ret; }
+fn main(args: [str]/~) { ret; }
diff --git a/src/test/run-pass/unique-assign-generic.rs b/src/test/run-pass/unique-assign-generic.rs
index ba8b4bb5723..230aa21cb90 100644
--- a/src/test/run-pass/unique-assign-generic.rs
+++ b/src/test/run-pass/unique-assign-generic.rs
@@ -6,6 +6,6 @@ fn f(t: T) -> T {
 fn main() {
     let t = f(~100);
     assert t == ~100;
-    let t = f(~@[100]);
-    assert t == ~@[100];
+    let t = f(~@[100]/~);
+    assert t == ~@[100]/~;
 }
diff --git a/src/test/run-pass/unique-autoderef-index.rs b/src/test/run-pass/unique-autoderef-index.rs
index 6b978fc1506..43d894ed3ac 100644
--- a/src/test/run-pass/unique-autoderef-index.rs
+++ b/src/test/run-pass/unique-autoderef-index.rs
@@ -1,4 +1,4 @@
 fn main() {
-    let i = ~[100];
+    let i = ~[100]/~;
     assert i[0] == 100;
 }
\ No newline at end of file
diff --git a/src/test/run-pass/unique-create.rs b/src/test/run-pass/unique-create.rs
index c46cf29ba5d..24714a94873 100644
--- a/src/test/run-pass/unique-create.rs
+++ b/src/test/run-pass/unique-create.rs
@@ -3,5 +3,5 @@ fn main() {
 }
 fn vec() {
-    [0];
+    [0]/~;
 }
\ No newline at end of file
diff --git a/src/test/run-pass/unique-drop-complex.rs b/src/test/run-pass/unique-drop-complex.rs
index 23048eb7868..6ae3224ccfc 100644
--- a/src/test/run-pass/unique-drop-complex.rs
+++ b/src/test/run-pass/unique-drop-complex.rs
@@ -1,3 +1,3 @@
 fn main() {
-    let x = ~[0,0,0,0,0];
+    let x = ~[0,0,0,0,0]/~;
 }
\ No newline at end of file
diff --git a/src/test/run-pass/unique-in-vec-copy.rs b/src/test/run-pass/unique-in-vec-copy.rs
index 1a9eb581c24..ed9ffd26ae8 100644
--- a/src/test/run-pass/unique-in-vec-copy.rs
+++ b/src/test/run-pass/unique-in-vec-copy.rs
@@ -1,5 +1,5 @@
 fn main() {
-    let a = [~mut 10];
+    let a = [~mut 10]/~;
     let b = a;
     assert *a[0] == 10;
diff --git a/src/test/run-pass/unique-in-vec.rs b/src/test/run-pass/unique-in-vec.rs
index 0f5bbce050f..62391debea2 100644
--- a/src/test/run-pass/unique-in-vec.rs
+++ b/src/test/run-pass/unique-in-vec.rs
@@ -1,3 +1,3 @@
 fn main() {
-    assert [~100][0] == ~100;
-}
\ No newline at end of file
+    assert ([~100]/~)[0] == ~100;
+}
diff --git a/src/test/run-pass/utf8_chars.rs b/src/test/run-pass/utf8_chars.rs
index 99842f2ebc7..99c6a0cc510 100644
--- a/src/test/run-pass/utf8_chars.rs
+++ b/src/test/run-pass/utf8_chars.rs
@@ -4,7 +4,7 @@ import vec;
 fn main() {
     // Chars of 1, 2, 3, and 4 bytes
-    let chs: [char] = ['e', 'é', '€', 0x10000 as char];
+    let chs: [char]/~ = ['e', 'é', '€', 0x10000 as char]/~;
     let s: str = str::from_chars(chs);
     assert (str::len(s) == 10u);
@@ -15,9 +15,9 @@ fn main() {
     assert (str::char_at(s, 1u) == 'é');
     assert (str::is_utf8(str::bytes(s)));
-    assert (!str::is_utf8([0x80_u8]));
-    assert (!str::is_utf8([0xc0_u8]));
-    assert (!str::is_utf8([0xc0_u8, 0x10_u8]));
+    assert (!str::is_utf8([0x80_u8]/~));
+    assert (!str::is_utf8([0xc0_u8]/~));
+    assert (!str::is_utf8([0xc0_u8, 0x10_u8]/~));
     let mut stack = "a×c€";
     assert (str::pop_char(stack) == '€');
diff --git a/src/test/run-pass/vec-concat.rs b/src/test/run-pass/vec-concat.rs
index 3a5a1916d64..6aa5f5ef424 100644
--- a/src/test/run-pass/vec-concat.rs
+++ b/src/test/run-pass/vec-concat.rs
@@ -1,8 +1,8 @@
 // -*- rust -*-
 fn main() {
-    let a: [int] = [1, 2, 3, 4, 5];
-    let b: [int] = [6, 7, 8, 9, 0];
-    let v: [int] = a + b;
+    let a: [int]/~ = [1, 2, 3, 4, 5]/~;
+    let b: [int]/~ = [6, 7, 8, 9, 0]/~;
+    let v: [int]/~ = a + b;
     log(debug, v[9]);
     assert (v[0] == 1);
     assert (v[7] == 8);
diff --git a/src/test/run-pass/vec-drop.rs b/src/test/run-pass/vec-drop.rs
index 499b24d43f0..9b75d0c2155 100644
--- a/src/test/run-pass/vec-drop.rs
+++ b/src/test/run-pass/vec-drop.rs
@@ -3,6 +3,6 @@ fn main() {
     // This just tests whether the vec leaks its members.
-    let pvec: [@{x: int, y: int}] =
-        [@{x: 1, y: 2}, @{x: 3, y: 4}, @{x: 5, y: 6}];
+    let pvec: [@{x: int, y: int}]/~ =
+        [@{x: 1, y: 2}, @{x: 3, y: 4}, @{x: 5, y: 6}]/~;
 }
diff --git a/src/test/run-pass/vec-growth.rs b/src/test/run-pass/vec-growth.rs
index 343004c81c2..b6c4aa3659f 100644
--- a/src/test/run-pass/vec-growth.rs
+++ b/src/test/run-pass/vec-growth.rs
@@ -1,11 +1,11 @@
 fn main() {
-    let mut v = [1];
-    v += [2];
-    v += [3];
-    v += [4];
-    v += [5];
+    let mut v = [1]/~;
+    v += [2]/~;
+    v += [3]/~;
+    v += [4]/~;
+    v += [5]/~;
     assert (v[0] == 1);
     assert (v[1] == 2);
     assert (v[2] == 3);
diff --git a/src/test/run-pass/vec-ivec-deadlock.rs b/src/test/run-pass/vec-ivec-deadlock.rs
index a2571b57b7a..4fb7eb8f7f5 100644
--- a/src/test/run-pass/vec-ivec-deadlock.rs
+++ b/src/test/run-pass/vec-ivec-deadlock.rs
@@ -1 +1 @@
-fn main() { let a = [1, 2, 3, 4, 5]; let mut b = [a, a]; b += b; }
+fn main() { let a = [1, 2, 3, 4, 5]/~; let mut b = [a, a]/~; b += b; }
diff --git a/src/test/run-pass/vec-late-init.rs b/src/test/run-pass/vec-late-init.rs
index 9b97ed092cd..6f5efa0bd01 100644
--- a/src/test/run-pass/vec-late-init.rs
+++ b/src/test/run-pass/vec-late-init.rs
@@ -1,7 +1,7 @@
 fn main() {
-    let mut later: [int];
-    if true { later = [1]; } else { later = [2]; }
+    let mut later: [int]/~;
+    if true { later = [1]/~; } else { later = [2]/~; }
     log(debug, later[0]);
 }
diff --git a/src/test/run-pass/vec-push.rs b/src/test/run-pass/vec-push.rs
index 15b9239c44b..2505b7141b1 100644
--- a/src/test/run-pass/vec-push.rs
+++ b/src/test/run-pass/vec-push.rs
@@ -1 +1 @@
-fn main() { let mut v = [1, 2, 3]; vec::push(v, 1); }
+fn main() { let mut v = [1, 2, 3]/~; vec::push(v, 1); }
diff --git a/src/test/run-pass/vec-self-append.rs b/src/test/run-pass/vec-self-append.rs
index e9e6cce4a87..c897a327a5e 100644
--- a/src/test/run-pass/vec-self-append.rs
+++ b/src/test/run-pass/vec-self-append.rs
@@ -3,7 +3,7 @@ import vec;
 fn test_heap_to_heap() {
     // a spills onto the heap
-    let mut a = [0, 1, 2, 3, 4];
+    let mut a = [0, 1, 2, 3, 4]/~;
     a += a;
     assert (vec::len(a) == 10u);
     assert (a[0] == 0);
@@ -20,7 +20,7 @@ fn test_heap_to_heap() {
 fn test_stack_to_heap() {
     // a is entirely on the stack
-    let mut a = [0, 1, 2];
+    let mut a = [0, 1, 2]/~;
     // a spills to the heap
     a += a;
     assert (vec::len(a) == 6u);
@@ -34,7 +34,7 @@ fn test_stack_to_heap() {
 fn test_loop() {
     // Make sure we properly handle repeated self-appends.
-    let mut a: [int] = [0];
+    let mut a: [int]/~ = [0]/~;
     let mut i = 20;
     let mut expected_len = 1u;
     while i > 0 {
diff --git a/src/test/run-pass/vec-slice.rs b/src/test/run-pass/vec-slice.rs
index b8ec6563a96..e1961d59b39 100644
--- a/src/test/run-pass/vec-slice.rs
+++ b/src/test/run-pass/vec-slice.rs
@@ -1,5 +1,5 @@
 fn main() {
-    let v = [1,2,3,4,5];
+    let v = [1,2,3,4,5]/~;
     let v2 = vec::slice(v, 1, 3);
     assert (v2[0] == 2);
     assert (v2[1] == 3);
diff --git a/src/test/run-pass/vec-trailing-comma.rs b/src/test/run-pass/vec-trailing-comma.rs
index 473ffe9f11d..b0dac0abd7e 100644
--- a/src/test/run-pass/vec-trailing-comma.rs
+++ b/src/test/run-pass/vec-trailing-comma.rs
@@ -1,10 +1,10 @@
 // Issue #2482.
 fn main() {
-    let v1: [int] = [10, 20, 30,];
-    let v2: [int] = [10, 20, 30];
+    let v1: [int]/~ = [10, 20, 30,]/~;
+    let v2: [int]/~ = [10, 20, 30]/~;
     assert (v1[2] == v2[2]);
-    let v3: [int] = [10,];
-    let v4: [int] = [10];
+    let v3: [int]/~ = [10,]/~;
+    let v4: [int]/~ = [10]/~;
     assert (v3[0] == v4[0]);
 }
diff --git a/src/test/run-pass/vec.rs b/src/test/run-pass/vec.rs
index f1cc97ab61a..cc70cd8cf8a 100644
--- a/src/test/run-pass/vec.rs
+++ b/src/test/run-pass/vec.rs
@@ -3,7 +3,7 @@
 // -*- rust -*-
 fn main() {
-    let v: [int] = [10, 20];
+    let v: [int]/~ = [10, 20]/~;
     assert (v[0] == 10);
     assert (v[1] == 20);
     let mut x: int = 0;
diff --git a/src/test/run-pass/vector-no-ann-2.rs b/src/test/run-pass/vector-no-ann-2.rs
index 10a9cd9c6b8..7e0e614c0df 100644
--- a/src/test/run-pass/vector-no-ann-2.rs
+++ b/src/test/run-pass/vector-no-ann-2.rs
@@ -1 +1 @@
-fn main() { let quux: @[uint] = @[]; }
+fn main() { let quux: @[uint]/~ = @[]/~; }
diff --git a/src/test/run-pass/while-with-break.rs b/src/test/run-pass/while-with-break.rs
index 24907f948dc..68c0779657e 100644
--- a/src/test/run-pass/while-with-break.rs
+++ b/src/test/run-pass/while-with-break.rs
@@ -8,8 +8,8 @@ fn main() {
         log(debug, i);
         i = i + 1;
         if i == 95 {
-            let v: [int] =
-                [1, 2, 3, 4, 5]; // we check that it is freed by break
+            let v: [int]/~ =
+                [1, 2, 3, 4, 5]/~; // we check that it is freed by break
             #debug("breaking");
             break;
diff --git a/src/test/run-pass/zip-same-length.rs b/src/test/run-pass/zip-same-length.rs
index ef095dca8e9..4c5c3343512 100644
--- a/src/test/run-pass/zip-same-length.rs
+++ b/src/test/run-pass/zip-same-length.rs
@@ -5,18 +5,18 @@ import uint;
 import u8;
 import vec::{head, is_not_empty, last, same_length, zip};
-fn enum_chars(start: u8, end: u8) -> [char] {
+fn enum_chars(start: u8, end: u8) -> [char]/~ {
     assert start < end;
     let mut i = start;
-    let mut r = [];
+    let mut r = []/~;
     while i <= end { vec::push(r, i as char); i += 1u as u8; }
     ret r;
 }
-fn enum_uints(start: uint, end: uint) -> [uint] {
+fn enum_uints(start: uint, end: uint) -> [uint]/~ {
     assert start < end;
     let mut i = start;
-    let mut r = [];
+    let mut r = []/~;
     while i <= end { vec::push(r, i); i += 1u; }
     ret r;
 }