auto merge of #13653 : jbcrail/rust/fix-comment-mistakes, r=alexcrichton
commit c46c7607a4
33 changed files with 53 additions and 53 deletions
@@ -659,13 +659,13 @@ impl<K: fmt::Show + TotalOrd, V: fmt::Show> fmt::Show for Branch<K, V> {
 }
 }

-//A LeafElt containts no left child, but a key-value pair.
+//A LeafElt contains no left child, but a key-value pair.
 struct LeafElt<K, V> {
 key: K,
 value: V
 }

-//A BranchElt has a left child in insertition to a key-value pair.
+//A BranchElt has a left child in insertion to a key-value pair.
 struct BranchElt<K, V> {
 left: ~Node<K, V>,
 key: K,
@@ -605,7 +605,7 @@ static INITIAL_LOAD_FACTOR: Fraction = (9, 10);
 //
 // > Why a load factor of 90%?
 //
-// In general, all the distances to inital buckets will converge on the mean.
+// In general, all the distances to initial buckets will converge on the mean.
 // At a load factor of α, the odds of finding the target bucket after k
 // probes is approximately 1-α^k. If we set this equal to 50% (since we converge
 // on the mean) and set k=8 (64-byte cache line / 8-byte hash), α=0.92. I round
@@ -618,7 +618,7 @@ static INITIAL_LOAD_FACTOR: Fraction = (9, 10);
 // > Wait, what? Where did you get 1-α^k from?
 //
 // On the first probe, your odds of a collision with an existing element is α.
-// The odds of doing this twice in a row is approximatelly α^2. For three times,
+// The odds of doing this twice in a row is approximately α^2. For three times,
 // α^3, etc. Therefore, the odds of colliding k times is α^k. The odds of NOT
 // colliding after k tries is 1-α^k.
 //
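
The two hashmap hunks above carry the Robin Hood hashing load-factor argument: after k probes, the chance of having reached the target bucket is roughly 1-α^k. A minimal standalone sketch of that arithmetic (plain Rust, not part of this commit):

```rust
fn main() {
    // With load factor α = 0.90 and k = 8 probes (one 64-byte cache line
    // of 8-byte hashes), the chance that every probe collides is α^8, so
    // the chance of having hit the target bucket within one cache line
    // is 1 - α^8 ≈ 0.57.
    let alpha: f64 = 0.90;
    println!("1 - α^8 = {:.3}", 1.0 - alpha.powi(8));

    // Solving 1 - α^8 = 0.5 for α gives α = 0.5^(1/8) ≈ 0.917, which is
    // why the comment quotes α = 0.92 and rounds the load factor to 90%.
    println!("α at 50% after 8 probes ≈ {:.3}", 0.5f64.powf(1.0 / 8.0));
}
```
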
@@ -692,7 +692,7 @@ static INITIAL_LOAD_FACTOR: Fraction = (9, 10);
 /// let mut book_reviews = HashMap::new();
 ///
 /// // review some books.
-/// book_reviews.insert("Adventures of Hucklebury Fin", "My favorite book.");
+/// book_reviews.insert("Adventures of Huckleberry Finn", "My favorite book.");
 /// book_reviews.insert("Grimms' Fairy Tales", "Masterpiece.");
 /// book_reviews.insert("Pride and Prejudice", "Very enjoyable.");
 /// book_reviews.insert("The Adventures of Sherlock Holmes", "Eye lyked it alot.");
@@ -782,7 +782,7 @@ impl<K: TotalEq + Hash<S>, V, S, H: Hasher<S>> HashMap<K, V, H> {
 /// from its 'ideal' location.
 ///
 /// In the cited blog posts above, this is called the "distance to
-/// inital bucket", or DIB.
+/// initial bucket", or DIB.
 fn bucket_distance(&self, index_of_elem: &table::FullIndex) -> uint {
 // where the hash of the element that happens to reside at
 // `index_of_elem` tried to place itself first.
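
The hunk above names the "distance to initial bucket" (DIB) that `bucket_distance` computes. A minimal free-standing sketch of the idea (assumed layout and names, not the actual `table` module API):

```rust
// The DIB of an element is how far its current slot sits past the slot
// its hash maps to, wrapping around the end of the table.
fn bucket_distance(hash: u64, index: usize, capacity: usize) -> usize {
    let ideal = (hash as usize) % capacity;
    (index + capacity - ideal) % capacity
}

fn main() {
    assert_eq!(bucket_distance(5, 7, 8), 2); // ideal slot 5, landed at 7
    assert_eq!(bucket_distance(7, 1, 8), 2); // wrapped past the end
}
```
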
@@ -308,7 +308,7 @@ pub struct RevMutEntries<'a, K, V> {
 // (with many different `x`) below, so we need to optionally pass mut
 // as a tt, but the only thing we can do with a `tt` is pass them to
 // other macros, so this takes the `& <mutability> <operand>` token
-// sequence and forces their evalutation as an expression.
+// sequence and forces their evaluation as an expression.
 macro_rules! addr { ($e:expr) => { $e }}
 // putting an optional mut into type signatures
 macro_rules! item { ($i:item) => { $i }}
@@ -141,7 +141,7 @@ impl<T> TrieMap<T> {
 // (with many different `x`) below, so we need to optionally pass mut
 // as a tt, but the only thing we can do with a `tt` is pass them to
 // other macros, so this takes the `& <mutability> <operand>` token
-// sequence and forces their evalutation as an expression. (see also
+// sequence and forces their evaluation as an expression. (see also
 // `item!` below.)
 macro_rules! addr { ($e:expr) => { $e } }

@@ -171,7 +171,7 @@ macro_rules! impl_integer_for_int {
 /// `other`.
 #[inline]
 fn lcm(&self, other: &$T) -> $T {
-// should not have to recaluculate abs
+// should not have to recalculate abs
 ((*self * *other) / self.gcd(other)).abs()
 }

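
The hunk above touches `lcm`, which relies on the identity lcm(a, b) = |a·b| / gcd(a, b). A minimal sketch of the same identity as free-standing Rust (assumed helper names, not the macro-generated code from the commit):

```rust
fn gcd(mut a: i64, mut b: i64) -> i64 {
    // Euclid's algorithm; the sign is normalized at the end.
    while b != 0 {
        let r = a % b;
        a = b;
        b = r;
    }
    a.abs()
}

fn lcm(a: i64, b: i64) -> i64 {
    // Same identity the original uses, with a single abs() at the end;
    // dividing before multiplying keeps the intermediate value smaller.
    (a / gcd(a, b) * b).abs()
}

fn main() {
    assert_eq!(lcm(4, 6), 12);
    assert_eq!(lcm(-3, 7), 21);
}
```
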
@@ -1171,7 +1171,7 @@ fn link_args(sess: &Session,
 // actually creates "invalid" objects [1] [2], but only for some
 // introspection tools, not in terms of whether it can be loaded.
 //
-// Long story shory, passing this flag forces the linker to *not*
+// Long story short, passing this flag forces the linker to *not*
 // truncate section names (so we can find the metadata section after
 // it's compiled). The real kicker is that rust compiled just fine on
 // windows for quite a long time *without* this flag, so I have no idea
@@ -1491,7 +1491,7 @@ fn add_upstream_rust_crates(args: &mut Vec<~str>, sess: &Session,
 }

 // Link in all of our upstream crates' native dependencies. Remember that
-// all of these upstream native depenencies are all non-static
+// all of these upstream native dependencies are all non-static
 // dependencies. We've got two cases then:
 //
 // 1. The upstream crate is an rlib. In this case we *must* link in the
@@ -1509,7 +1509,7 @@ fn add_upstream_rust_crates(args: &mut Vec<~str>, sess: &Session,
 // be instantiated in the target crate, meaning that the native symbol must
 // also be resolved in the target crate.
 fn add_upstream_native_libraries(args: &mut Vec<~str>, sess: &Session) {
-// Be sure to use a topological sorting of crates becuase there may be
+// Be sure to use a topological sorting of crates because there may be
 // interdependencies between native libraries. When passing -nodefaultlibs,
 // for example, almost all native libraries depend on libc, so we have to
 // make sure that's all the way at the right (liblibc is near the base of
@@ -20,7 +20,7 @@
 //! such.
 //!
 //! The core of this problem is when an upstream dependency changes and
-//! downstream dependants are not recompiled. This causes compile errors because
+//! downstream dependents are not recompiled. This causes compile errors because
 //! the upstream crate's metadata has changed but the downstream crates are
 //! still referencing the older crate's metadata.
 //!
@@ -222,7 +222,7 @@ impl<'a> Context<'a> {
 //
 // A Library candidate is created if the metadata for the set of
 // libraries corresponds to the crate id and hash criteria that this
-// serach is being performed for.
+// search is being performed for.
 let mut libraries = Vec::new();
 for (_hash, (rlibs, dylibs)) in candidates.move_iter() {
 let mut metadata = None;
@@ -278,7 +278,7 @@ impl<'a> Context<'a> {
 // rlib/dylib).
 //
 // The return value is `None` if `file` doesn't look like a rust-generated
-// library, or if a specific version was requested and it doens't match the
+// library, or if a specific version was requested and it doesn't match the
 // apparent file's version.
 //
 // If everything checks out, then `Some(hash)` is returned where `hash` is
@@ -275,7 +275,7 @@ fn create_and_seed_worklist(tcx: &ty::ctxt,
 None => ()
 }

-// Seed implemeneted trait methods
+// Seed implemented trait methods
 let mut life_seeder = LifeSeeder {
 worklist: worklist
 };
@@ -480,7 +480,7 @@ fn visit_expr(ir: &mut IrMaps, expr: &Expr) {
 // var must be dead afterwards
 moves::CapMove => true,

-// var can stil be used
+// var can still be used
 moves::CapCopy | moves::CapRef => false
 };
 call_caps.push(CaptureInfo {ln: cv_ln,
@@ -613,7 +613,7 @@ impl<'a> Liveness<'a> {
 f: |&mut Liveness<'a>, LiveNode, Variable, Span, NodeId|) {
 // only consider the first pattern; any later patterns must have
 // the same bindings, and we also consider the first pattern to be
-// the "authoratative" set of ids
+// the "authoritative" set of ids
 if !pats.is_empty() {
 self.pat_bindings(pats[0], f)
 }
@@ -63,7 +63,7 @@ impl Visitor<()> for ParentVisitor {
 let prev = self.curparent;
 match item.node {
 ast::ItemMod(..) => { self.curparent = item.id; }
-// Enum variants are parented to the enum definition itself beacuse
+// Enum variants are parented to the enum definition itself because
 // they inherit privacy
 ast::ItemEnum(ref def, _) => {
 for variant in def.variants.iter() {
@@ -1034,7 +1034,7 @@ impl<'a> Visitor<()> for SanePrivacyVisitor<'a> {
 }

 impl<'a> SanePrivacyVisitor<'a> {
-/// Validates all of the visibility qualifers placed on the item given. This
+/// Validates all of the visibility qualifiers placed on the item given. This
 /// ensures that there are no extraneous qualifiers that don't actually do
 /// anything. In theory these qualifiers wouldn't parse, but that may happen
 /// later on down the road...
@@ -1262,7 +1262,7 @@ impl<'a> Visitor<()> for VisiblePrivateTypesVisitor<'a> {
 self_is_public_path = visitor.outer_type_is_public_path;
 }

-// miscellanous info about the impl
+// miscellaneous info about the impl

 // `true` iff this is `impl Private for ...`.
 let not_private_trait =
@@ -182,7 +182,7 @@ impl RegionMaps {

 // else, locate the innermost terminating scope
 // if there's one. Static items, for instance, won't
-// have an enclusing scope, hence no scope will be
+// have an enclosing scope, hence no scope will be
 // returned.
 let mut id = match self.opt_encl_scope(expr_id) {
 Some(i) => i,
@@ -533,7 +533,7 @@ fn resolve_expr(visitor: &mut RegionResolutionVisitor,
 // the invoked function is actually running* and call.id
 // represents *the time to prepare the arguments and make the
 // call*. See the section "Borrows in Calls" borrowck/doc.rs
-// for an extended explanantion of why this distinction is
+// for an extended explanation of why this distinction is
 // important.
 //
 // record_superlifetime(new_cx, expr.callee_id);
@@ -604,7 +604,7 @@ fn resolve_local(visitor: &mut RegionResolutionVisitor,
 // (covers cases `expr` borrows an rvalue that is then assigned
 // to memory (at least partially) owned by the binding)
 //
-// Here are some examples hopefully giving an intution where each
+// Here are some examples hopefully giving an intuition where each
 // rule comes into play and why:
 //
 // Rule A. `let (ref x, ref y) = (foo().x, 44)`. The rvalue `(22, 44)`
@@ -72,7 +72,7 @@ pub enum LastPrivate {
 // `use` directives (imports) can refer to two separate definitions in the
 // type and value namespaces. We record here the last private node for each
 // and whether the import is in fact used for each.
-// If the Option<PrivateDep> fields are None, it means there is no defintion
+// If the Option<PrivateDep> fields are None, it means there is no definition
 // in that namespace.
 LastImport{pub value_priv: Option<PrivateDep>,
 pub value_used: ImportUse,
@@ -3610,7 +3610,7 @@ impl<'a> Resolver<'a> {
 }
 }

-// n.b. the discr expr gets visted twice.
+// n.b. the discr expr gets visited twice.
 // but maybe it's okay since the first time will signal an
 // error if there is one? -- tjc
 self.with_type_parameter_rib(HasTypeParameters(generics,
@@ -272,7 +272,7 @@ impl Subst for ty::Region {
 substs: &ty::substs,
 _: Option<Span>) -> ty::Region {
 // Note: This routine only handles regions that are bound on
-// type declarationss and other outer declarations, not those
+// type declarations and other outer declarations, not those
 // bound in *fn types*. Region substitution of the bound
 // regions that appear in a function signature is done using
 // the specialized routine
@@ -963,7 +963,7 @@ fn get_options(bcx: &Block, m: &[Match], col: uint) -> Vec<Opt> {
 if set.iter().any(|l| opt_eq(tcx, l, &val)) {return;}
 set.push(val);
 }
-// Vector comparisions are special in that since the actual
+// Vector comparisons are special in that since the actual
 // conditions over-match, we need to be careful about them. This
 // means that in order to properly handle things in order, we need
 // to not always merge conditions.
@@ -370,7 +370,7 @@ pub fn trans_fn_ref_with_vtables(
 false
 };

-// Create a monomorphic verison of generic functions
+// Create a monomorphic version of generic functions
 if must_monomorphise {
 // Should be either intra-crate or inlined.
 assert_eq!(def_id.krate, ast::LOCAL_CRATE);
@@ -84,7 +84,7 @@ use syntax::ast_util;
 // because the alignment requirements of the bound data affects the
 // alignment requires of the closure_data struct as a whole. However,
 // right now this is a non-issue in any case, because the size of the
-// rust_opaque_box header is always a mutiple of 16-bytes, which is
+// rust_opaque_box header is always a multiple of 16-bytes, which is
 // the maximum alignment requirement we ever have to worry about.
 //
 // The only reason alignment matters is that, in order to learn what data
@@ -2503,7 +2503,7 @@ fn populate_scope_map(cx: &CrateContext,
 ast::PatIdent(_, ref path_ref, ref sub_pat_opt) => {

 // Check if this is a binding. If so we need to put it on the scope stack and maybe
-// introduce an articial scope
+// introduce an artificial scope
 if pat_util::pat_is_binding(def_map, pat) {

 let ident = ast_util::path_to_ident(path_ref);
@@ -128,7 +128,7 @@ pub fn monomorphic_fn(ccx: &CrateContext,
 // Static default methods are a little unfortunate, in
 // that the "internal" and "external" type of them differ.
 // Internally, the method body can refer to Self, but the
-// externally visable type of the method has a type param
+// externally visible type of the method has a type param
 // inserted in between the trait type params and the
 // method type params. The substs that we are given are
 // the proper substs *internally* to the method body, so
@@ -2338,7 +2338,7 @@ pub fn is_instantiable(cx: &ctxt, r_ty: t) -> bool {
 let r = match get(ty).sty {
 // fixed length vectors need special treatment compared to
 // normal vectors, since they don't necessarily have the
-// possibilty to have length zero.
+// possibility to have length zero.
 ty_vec(_, Some(0)) => false, // don't need no contents
 ty_vec(mt, Some(_)) => type_requires(cx, seen, r_ty, mt.ty),

@@ -238,7 +238,7 @@ pub struct FnCtxt<'a> {
 //
 // What we do in such cases is to generate a region variable with
 // `region_lb` as a lower bound. The regionck pass then adds
-// other constriants based on how the variable is used and region
+// other constraints based on how the variable is used and region
 // inference selects the ultimate value. Finally, borrowck is
 // charged with guaranteeing that the value whose address was taken
 // can actually be made to live as long as it needs to live.
@@ -2548,7 +2548,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
 ty::mt {ty: t, mutbl: mutability},
 None)), // Sadly, we know the length
 // - Some(args.len()) - but
-// must thow it away or cause
+// must throw it away or cause
 // confusion further down the
 // pipeline. Hopefully we can
 // remedy this later.
@@ -36,7 +36,7 @@
 // therefore cannot sensibly be mapped to any particular result. By
 // default, we will leave such variables as is (so you will get back a
 // variable in your result). The options force_* will cause the
-// resolution to fail in this case intead, except for the case of
+// resolution to fail in this case instead, except for the case of
 // integral variables, which resolve to `int` if forced.
 //
 // # resolve_all and force_all
@@ -146,14 +146,14 @@ impl FixedBuffer for FixedBuffer64 {
 }
 }

-// While we have at least a full buffer size chunks's worth of data, process that data
+// While we have at least a full buffer size chunk's worth of data, process that data
 // without copying it into the buffer
 while input.len() - i >= size {
 func(input.slice(i, i + size));
 i += size;
 }

-// Copy any input data into the buffer. At this point in the method, the ammount of
+// Copy any input data into the buffer. At this point in the method, the amount of
 // data left in the input vector will be less than the buffer size and the buffer will
 // be empty.
 let input_remaining = input.len() - i;
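
The hunk above documents the usual streaming-digest buffering strategy: hand whole buffer-sized chunks to the processing function directly, then stash the remainder (always shorter than one chunk) in the internal buffer. A minimal free-standing sketch of that loop (assumed names, not the crypto module's actual API):

```rust
/// Feed `input` to `func` one `size`-byte chunk at a time and return the
/// tail that did not fill a whole chunk (the part a caller would buffer).
fn process_chunks<F: FnMut(&[u8])>(input: &[u8], size: usize, mut func: F) -> Vec<u8> {
    let mut i = 0;
    // While at least a full chunk's worth of data remains, process it
    // without copying it into a buffer.
    while input.len() - i >= size {
        func(&input[i..i + size]);
        i += size;
    }
    // Fewer than `size` bytes remain; this is what gets buffered.
    input[i..].to_vec()
}

fn main() {
    let mut blocks = 0;
    let rest = process_chunks(&[0u8; 150], 64, |_chunk| blocks += 1);
    assert_eq!((blocks, rest.len()), (2, 22));
}
```
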
@@ -26,7 +26,7 @@
 //!
 //! // Our implementation of `Eq` to support `==` and `!=`.
 //! impl Eq for SketchyNum {
-//! // Our custom eq allows numbers which are near eachother to be equal! :D
+//! // Our custom eq allows numbers which are near each other to be equal! :D
 //! fn eq(&self, other: &SketchyNum) -> bool {
 //! (self.num - other.num).abs() < 5
 //! }
@@ -283,7 +283,7 @@ mod test {

 // Our implementation of `Eq` to support `==` and `!=`.
 impl Eq for SketchyNum {
-// Our custom eq allows numbers which are near eachother to be equal! :D
+// Our custom eq allows numbers which are near each other to be equal! :D
 fn eq(&self, other: &SketchyNum) -> bool {
 (self.num - other.num).abs() < 5
 }
@@ -937,7 +937,7 @@ impl<A: TotalOrd, T: Iterator<A>> OrdIterator<A> for T {
 loop {
 // `first` and `second` are the two next elements we want to look at.
 // We first compare `first` and `second` (#1). The smaller one is then compared to
-// current mininum (#2). The larger one is compared to current maximum (#3). This
+// current minimum (#2). The larger one is compared to current maximum (#3). This
 // way we do 3 comparisons for 2 elements.
 let first = match self.next() {
 None => break,
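
The hunk above describes the pairwise min/max trick: order the two fresh elements against each other first (#1), then compare only the smaller to the running minimum (#2) and the larger to the running maximum (#3), for 3 comparisons per 2 elements instead of 4. A minimal sketch of the idea over a slice (not the iterator adaptor the commit touches):

```rust
fn min_max(xs: &[i32]) -> Option<(i32, i32)> {
    let mut it = xs.iter().copied();
    let first = it.next()?;
    let (mut min, mut max) = (first, first);
    while let Some(a) = it.next() {
        match it.next() {
            Some(b) => {
                // #1: order the fresh pair, then #2/#3: one comparison
                // against each running extreme.
                let (lo, hi) = if a <= b { (a, b) } else { (b, a) };
                if lo < min { min = lo; }
                if hi > max { max = hi; }
            }
            // Odd element left over: check it against both extremes.
            None => {
                if a < min { min = a; }
                if a > max { max = a; }
            }
        }
    }
    Some((min, max))
}

fn main() {
    assert_eq!(min_max(&[3, 1, 4, 1, 5, 9, 2, 6]), Some((1, 9)));
    assert_eq!(min_max(&[]), None);
}
```
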
@@ -24,7 +24,7 @@
 /// which is transmitted.
 ///
 /// The multi-argument form of this macro fails with a string and has the
-/// `format!` sytnax for building a string.
+/// `format!` syntax for building a string.
 ///
 /// # Example
 ///
@@ -15,7 +15,7 @@
 //! an unsafe pointer when safe pointers are unsuitable;
 //! checking for null; and converting back to safe pointers.
 //! As a result, there is not yet an abundance of library code
-//! for working with unsafe poniters, and in particular,
+//! for working with unsafe pointers, and in particular,
 //! since pointer math is fairly uncommon in Rust, it is not
 //! all that convenient.
 //!
@@ -15,7 +15,7 @@
 //! They can be used as targets of transmutes in unsafe code for manipulating
 //! the raw representations directly.
 //!
-//! Their definitition should always match the ABI defined in `rustc::back::abi`.
+//! Their definition should always match the ABI defined in `rustc::back::abi`.

 use cast;

@@ -103,7 +103,7 @@
 //! ~~~
 //!
 //! *Note: The actual definition of `Writer` uses `IoResult`, which
-//! is just a synonymn for `Result<T, IoError>`.*
+//! is just a synonym for `Result<T, IoError>`.*
 //!
 //! This method doesn`t produce a value, but the write may
 //! fail. It's crucial to handle the error case, and *not* write
@@ -255,7 +255,7 @@
 //! handling requires encapsulating fallable code in a task. Calling
 //! the `fail!` macro, or invoking `fail!` indirectly should be
 //! avoided as an error reporting strategy. Failure is only for
-//! unrecovereable errors and a failing task is typically the sign of
+//! unrecoverable errors and a failing task is typically the sign of
 //! a bug.
 //!
 //! A module that instead returns `Results` is alerting the caller
@@ -374,7 +374,7 @@ pub mod native {
 pub fn maybe_tls_key() -> Option<tls::Key> {
 unsafe {
 // NB: This is a little racy because, while the key is
-// initalized under a mutex and it's assumed to be initalized
+// initialized under a mutex and it's assumed to be initialized
 // in the Scheduler ctor by any thread that needs to use it,
 // we are not accessing the key under a mutex. Threads that
 // are not using the new Scheduler but still *want to check*
@@ -662,7 +662,7 @@ impl<'a> Iterator<char> for Normalizations<'a> {
 ///
 /// # Return value
 ///
-/// The original string with all occurances of `from` replaced with `to`
+/// The original string with all occurrences of `from` replaced with `to`
 pub fn replace(s: &str, from: &str, to: &str) -> ~str {
 let mut result = StrBuf::new();
 let mut last_end = 0;
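
The hunk above fixes the doc comment on `replace`, which walks the string copying the text between matches, then the replacement, then whatever trails the last match. A minimal modern-Rust sketch of that loop (not the libstd code shown in the diff):

```rust
fn replace(s: &str, from: &str, to: &str) -> String {
    let mut result = String::new();
    let mut last_end = 0;
    for (start, part) in s.match_indices(from) {
        // Copy the unmatched stretch, then the replacement.
        result.push_str(&s[last_end..start]);
        result.push_str(to);
        last_end = start + part.len();
    }
    // Copy whatever follows the final match.
    result.push_str(&s[last_end..]);
    result
}

fn main() {
    assert_eq!(replace("one two two", "two", "2"), "one 2 2");
}
```
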
@@ -1443,7 +1443,7 @@ pub mod raw {
 /// Sets the length of a string
 ///
 /// This will explicitly set the size of the string, without actually
-/// modifing its buffers, so it is up to the caller to ensure that
+/// modifying its buffers, so it is up to the caller to ensure that
 /// the string is actually the specified size.
 #[test]
 fn test_from_buf_len() {
@@ -2022,7 +2022,7 @@ pub trait StrSlice<'a> {
 ///
 /// # Return value
 ///
-/// The original string with all occurances of `from` replaced with `to`.
+/// The original string with all occurrences of `from` replaced with `to`.
 ///
 /// # Example
 ///
@@ -31,7 +31,7 @@ pub struct StrBuf {
 }

 impl StrBuf {
-/// Creates a new string buffer initalized with the empty string.
+/// Creates a new string buffer initialized with the empty string.
 #[inline]
 pub fn new() -> StrBuf {
 StrBuf {
@@ -135,7 +135,7 @@ impl<T> Vec<T> {
 Vec { len: length, cap: capacity, ptr: ptr }
 }

-/// Consumes the `Vec`, partitioning it based on a predcate.
+/// Consumes the `Vec`, partitioning it based on a predicate.
 ///
 /// Partitions the `Vec` into two `Vec`s `(A,B)`, where all elements of `A`
 /// satisfy `f` and all elements of `B` do not. The order of elements is
@@ -279,7 +279,7 @@ impl<T: Clone> Vec<T> {
 *self.get_mut(index) = value;
 }

-/// Partitions a vector based on a predcate.
+/// Partitions a vector based on a predicate.
 ///
 /// Clones the elements of the vector, partitioning them into two `Vec`s
 /// `(A,B)`, where all elements of `A` satisfy `f` and all elements of `B`