Comments only: change TODOs to FIXMEs and annotate them
parent 50d2e7e07e
commit 889be71cb4
13 changed files with 19 additions and 20 deletions
@@ -588,7 +588,8 @@ mod node {
  * Used for rebalancing and to allocate stacks for traversals.
  */
 type concat = {
-    left: @node,//TODO: Perhaps a `vec` instead of `left`/`right`
+    //FIXME (#2744): Perhaps a `vec` instead of `left`/`right`
+    left: @node,
     right: @node,
     char_len: uint,
     byte_len: uint,
@@ -732,7 +733,8 @@ mod node {
 }

 pure fn byte_len(node: @node) -> uint {
-    alt(*node) {//TODO: Could we do this without the pattern-matching?
+    //FIXME (#2744): Could we do this without the pattern-matching?
+    alt(*node) {
       leaf(y) { ret y.byte_len; }
       concat(y){ ret y.byte_len; }
     }
@@ -805,7 +807,7 @@ mod node {
     alt(leaf_iterator::next(it)) {
       option::none { break; }
       option::some(x) {
-        //TODO: Replace with memcpy or something similar
+        //FIXME (#2744): Replace with memcpy or something similar
         let mut local_buf: ~[u8] =
             unsafe::reinterpret_cast(*x.content);
         let mut i = x.byte_offset;
@@ -2,7 +2,7 @@

 import core::option;

-// TODO: Windows support.
+// FIXME (#2807): Windows support.

 const color_black: u8 = 0u8;
 const color_red: u8 = 1u8;
@@ -55,7 +55,7 @@ fn find<K: copy, V: copy>(m: &const tree_edge<K, V>, k: K) -> option<V> {
     alt copy *m {
       none { none }

-      // TODO: was that an optimization?
+      // FIXME (#2808): was that an optimization?
       some(node) {
         if k == node.key {
             some(node.value)
@@ -368,7 +368,7 @@ enum inline_attr {

 /// True if something like #[inline] is found in the list of attrs.
 fn find_inline_attr(attrs: ~[ast::attribute]) -> inline_attr {
-    // TODO---validate the usage of #[inline] and #[inline(always)]
+    // FIXME (#2809)---validate the usage of #[inline] and #[inline(always)]
     do vec::foldl(ia_none, attrs) |ia,attr| {
         alt attr.node.value.node {
           ast::meta_word(@"inline") { ia_hint }
@@ -58,7 +58,7 @@ Similarly, the code to deserialize an instance of a non-built-in type
 where `c_Ti` is the code to deserialize an instance of `Ti` using the
 deserializer `d`.

-TODO--Hygiene. Search for "__" strings. We also assume "std" is the
+FIXME (#2810)--Hygiene. Search for "__" strings. We also assume "std" is the
 standard library.

 Misc notes:
@@ -23,7 +23,8 @@ enum tt_frame_up { /* to break a circularity */
     tt_frame_up(option<tt_frame>)
 }

-/* TODO: figure out how to have a uniquely linked stack, and change to `~` */
+/* FIXME (#2811): figure out how to have a uniquely linked stack,
+   and change to `~` */
 /// an unzipping of `token_tree`s
 type tt_frame = @{
     readme: ~[ast::token_tree],
@@ -58,8 +58,8 @@ mod write {
         let td = mk_target_data(
             sess.targ_cfg.target_strs.data_layout);
         llvm::LLVMAddTargetData(td.lltd, pm.llpm);
-        // TODO: run the linter here also, once there are llvm-c bindings for
-        // it.
+        // FIXME (#2812): run the linter here also, once there are llvm-c
+        // bindings for it.

         // Generate a pre-optimization intermediate file if -save-temps was
         // specified.
@@ -162,7 +162,7 @@ impl translation_routines for extended_decode_ctxt {
         {crate: ast::local_crate, node: self.tr_id(did.node)}
     }
     fn tr_span(_span: span) -> span {
-        ast_util::dummy_sp() // TODO...
+        ast_util::dummy_sp() // FIXME (#1972): handle span properly
     }
 }

@@ -2758,9 +2758,6 @@ fn lookup_field_type(tcx: ctxt, class_id: def_id, id: def_id,
       some(tpt) { tpt.ty }
       none {
           let tpt = csearch::get_field_type(tcx, class_id, id);
-          // ok b/c fields are monomorphic
-          // TODO: Comment might be a lie, what if it mentions
-          // class-bound ty params?
           tcx.tcache.insert(id, tpt);
           tpt.ty
       }
@@ -344,7 +344,8 @@ fn convert(ccx: @crate_ctxt, it: @ast::item) {
         inputs: t_args,
         output: t_res,
         ret_style: ast::return_val,
-        constraints: ~[]}); // tjc TODO
+        constraints: ~[]}); // FIXME (#2813): allow ctors to have
+        // constraints, or remove constraints from the language
     write_ty_to_tcx(tcx, ctor.node.id, t_ctor);
     tcx.tcache.insert(local_def(ctor.node.id),
         {bounds: tpt.bounds,
@@ -554,8 +554,8 @@ impl transaction_methods for infer_ctxt {

         let r <- self.try(f);

-        // TODO---could use a vec::clear() that ran destructors but kept
-        // the vec at its currently allocated length
+        // FIXME (#2814)---could use a vec::clear() that ran destructors but
+        // kept the vec at its currently allocated length
         self.tvb.bindings = ~[];
         self.rb.bindings = ~[];

@@ -235,7 +235,7 @@ fn main(argv: ~[str]) {

         out.write_line(#fmt["Usage: %s <filename> ...", argv[0]]);

-        // TODO: run something just to make sure the code hasn't
+        // FIXME (#2815): run something just to make sure the code hasn't
         // broken yet. This is the unit test mode of this program.

         ret;
@@ -1,6 +1,4 @@
 // Uses foldl to exhibit the unchecked block syntax.
-// TODO: since list's head/tail require the predicate "is_not_empty" now and
-// we have unit tests for list, this test might me not necessary anymore?
 use std;

 import std::list::*;