Fix more misspelled comments and strings.

commit c2c9946372
parent 0ee6a8e8a5

48 changed files with 64 additions and 64 deletions

@@ -184,7 +184,7 @@ impl<T: Share + Send> Drop for Arc<T> {

 // This fence is needed to prevent reordering of use of the data and
 // deletion of the data. Because it is marked `Release`, the
-// decreasing of the reference count sychronizes with this `Acquire`
+// decreasing of the reference count synchronizes with this `Acquire`
 // fence. This means that use of the data happens before decreasing
 // the refernce count, which happens before this fence, which
 // happens before the deletion of the data.

@@ -539,7 +539,7 @@ impl<'a> Formatter<'a> {
 }

 /// Runs a callback, emitting the correct padding either before or
-/// afterwards depending on whether right or left alingment is requested.
+/// afterwards depending on whether right or left alignment is requested.
 fn with_padding(&mut self,
 padding: uint,
 default: rt::Alignment,

@@ -105,7 +105,7 @@
 //! *Note: The actual definition of `Writer` uses `IoResult`, which
 //! is just a synonym for `Result<T, IoError>`.*
 //!
-//! This method doesn`t produce a value, but the write may
+//! This method doesn't produce a value, but the write may
 //! fail. It's crucial to handle the error case, and *not* write
 //! something like this:
 //!

@@ -163,7 +163,7 @@ pub struct OptGroup {
 pub occur: Occur
 }

-/// Describes wether an option is given at all or has a value.
+/// Describes whether an option is given at all or has a value.
 #[deriving(Clone, PartialEq)]
 enum Optval {
 Val(String),

@@ -226,7 +226,7 @@ fn initialize_call_frame(regs: &mut Registers, fptr: InitFn, arg: uint,
 regs[RUSTRT_R14] = procedure.env as uint;
 regs[RUSTRT_R15] = fptr as uint;

-// These registers are picked up by the regulard context switch paths. These
+// These registers are picked up by the regular context switch paths. These
 // will put us in "mostly the right context" except for frobbing all the
 // arguments to the right place. We have the small trampoline code inside of
 // rust_bootstrap_green_task to do that.

@@ -82,7 +82,7 @@ pub struct Scheduler {
 run_anything: bool,
 /// A fast XorShift rng for scheduler use
 rng: XorShiftRng,
-/// A togglable idle callback
+/// A toggleable idle callback
 idle_callback: Option<Box<PausableIdleCallback:Send>>,
 /// A countdown that starts at a random value and is decremented
 /// every time a yield check is performed. When it hits 0 a task

@@ -287,7 +287,7 @@ impl Scheduler {

 // After processing a message, we consider doing some more work on the
 // event loop. The "keep going" condition changes after the first
-// iteration becase we don't want to spin here infinitely.
+// iteration because we don't want to spin here infinitely.
 //
 // Once we start doing work we can keep doing work so long as the
 // iteration does something. Note that we don't want to starve the

@@ -291,7 +291,7 @@ pub use types::os::arch::extra::{mach_timebase_info};
 extern {}

 /// A wrapper for a nullable pointer. Don't use this except for interacting
-/// with libc. Basically Option, but without the dependance on libstd.
+/// with libc. Basically Option, but without the dependence on libstd.
 // If/when libprim happens, this can be removed in favor of that
 pub enum Nullable<T> {
 Null,

@@ -3497,7 +3497,7 @@ pub mod consts {


 pub mod funcs {
-// Thankfull most of c95 is universally available and does not vary by OS
+// Thankfully most of c95 is universally available and does not vary by OS
 // or anything. The same is not true of POSIX.

 pub mod c95 {

@@ -50,7 +50,7 @@
 //! it sounded like named pipes just weren't built for this kind of interaction,
 //! and the suggested solution was to use overlapped I/O.
 //!
-//! I don't realy know what overlapped I/O is, but my basic understanding after
+//! I don't really know what overlapped I/O is, but my basic understanding after
 //! reading about it is that you have an external Event which is used to signal
 //! I/O completion, passed around in some OVERLAPPED structures. As to what this
 //! is, I'm not exactly sure.

@@ -923,7 +923,7 @@ fn waitpid(pid: pid_t, deadline: u64) -> IoResult<rtio::ProcessExit> {
 // Register a new SIGCHLD handler, returning the reading half of the
 // self-pipe plus the old handler registered (return value of sigaction).
 //
-// Be sure to set up the self-pipe first because as soon as we reigster a
+// Be sure to set up the self-pipe first because as soon as we register a
 // handler we're going to start receiving signals.
 fn register_sigchld() -> (libc::c_int, c::sigaction) {
 unsafe {

@@ -166,7 +166,7 @@ impl rt::Runtime for Ops {
 //
 // On a mildly unrelated note, it should also be pointed out that OS
 // condition variables are susceptible to spurious wakeups, which we need to
-// be ready for. In order to accomodate for this fact, we have an extra
+// be ready for. In order to accommodate for this fact, we have an extra
 // `awoken` field which indicates whether we were actually woken up via some
 // invocation of `reawaken`. This flag is only ever accessed inside the
 // lock, so there's no need to make it atomic.

@@ -34,7 +34,7 @@ pub enum Inst {

 // The CharClass instruction tries to match one input character against
 // the range of characters given.
-// The flags indicate whether to do a case insentivie match and whether
+// The flags indicate whether to do a case insensitive match and whether
 // the character class is negated or not.
 CharClass(Vec<(char, char)>, Flags),

@@ -48,7 +48,7 @@ pub enum Inst {
 EmptyBegin(Flags),

 // Matches the end of the string, consumes no characters.
-// The flags indicate whether it matches if the proceding character
+// The flags indicate whether it matches if the proceeding character
 // is a new line.
 EmptyEnd(Flags),

@@ -189,7 +189,7 @@ fn describe_codegen_flags() {
 }
 }

-/// Process command line options. Emits messages as appropirate.If compilation
+/// Process command line options. Emits messages as appropriate. If compilation
 /// should continue, returns a getopts::Matches object parsed from args, otherwise
 /// returns None.
 pub fn handle_options(mut args: Vec<String>) -> Option<getopts::Matches> {

@@ -551,7 +551,7 @@ are computed based on the kind of borrow:
 The reasoning here is that a mutable borrow must be the only writer,
 therefore it prevents other writes (`MUTATE`), mutable borrows
 (`CLAIM`), and immutable borrows (`FREEZE`). An immutable borrow
-permits other immutable borrows but forbids writes and mutable borows.
+permits other immutable borrows but forbids writes and mutable borrows.
 Finally, a const borrow just wants to be sure that the value is not
 moved out from under it, so no actions are forbidden.

@@ -438,7 +438,7 @@ impl<'d,'t,TYPER:mc::Typer> ExprUseVisitor<'d,'t,TYPER> {
 None => {
 self.tcx().sess.span_bug(
 callee.span,
-format!("unxpected callee type {}",
+format!("unexpected callee type {}",
 callee_ty.repr(self.tcx())).as_slice());
 }
 }

@@ -257,7 +257,7 @@ impl<N,E> Graph<N,E> {
 //
 // A common use for graphs in our compiler is to perform
 // fixed-point iteration. In this case, each edge represents a
-// constaint, and the nodes themselves are associated with
+// constraint, and the nodes themselves are associated with
 // variables or other bitsets. This method facilitates such a
 // computation.

@@ -31,7 +31,7 @@
 * is the address of the lvalue. If Expr is an rvalue, this is the address of
 * some temporary spot in memory where the result is stored.
 *
-* Now, cat_expr() classies the expression Expr and the address A=ToAddr(Expr)
+* Now, cat_expr() classifies the expression Expr and the address A=ToAddr(Expr)
 * as follows:
 *
 * - cat: what kind of expression was this? This is a subset of the

@@ -42,7 +42,7 @@
 *
 * The resulting categorization tree differs somewhat from the expressions
 * themselves. For example, auto-derefs are explicit. Also, an index a[b] is
-* decomposed into two operations: a derefence to reach the array data and
+* decomposed into two operations: a dereference to reach the array data and
 * then an index to jump forward to the relevant item.
 *
 * ## By-reference upvars

@@ -39,7 +39,7 @@ The region maps encode information about region relationships.

 - `scope_map` maps from a scope id to the enclosing scope id; this is
 usually corresponding to the lexical nesting, though in the case of
-closures the parent scope is the innermost conditinal expression or repeating
+closures the parent scope is the innermost conditional expression or repeating
 block

 - `var_map` maps from a variable or binding id to the block in which

@@ -717,7 +717,7 @@ pub fn trans_field_ptr(bcx: &Block, r: &Repr, val: ValueRef, discr: Disr,
 let ty = type_of::type_of(bcx.ccx(), *nullfields.get(ix));
 assert_eq!(machine::llsize_of_alloc(bcx.ccx(), ty), 0);
 // The contents of memory at this pointer can't matter, but use
-// the value that's "reasonable" in case of pointer comparision.
+// the value that's "reasonable" in case of pointer comparison.
 PointerCast(bcx, val, ty.ptr_to())
 }
 RawNullablePointer { nndiscr, nnty, .. } => {

@@ -1571,7 +1571,7 @@ fn enum_variant_size_lint(ccx: &CrateContext, enum_def: &ast::EnumDef, sp: Span,
 for var in variants.iter() {
 let mut size = 0;
 for field in var.fields.iter().skip(1) {
-// skip the dicriminant
+// skip the discriminant
 size += llsize_of_real(ccx, sizing_type_of(ccx, *field));
 }
 sizes.push(size);

@@ -2318,7 +2318,7 @@ pub fn trans_crate(krate: ast::Crate,
 // LLVM code generator emits a ".file filename" directive
 // for ELF backends. Value of the "filename" is set as the
 // LLVM module identifier. Due to a LLVM MC bug[1], LLVM
-// crashes if the module identifer is same as other symbols
+// crashes if the module identifier is same as other symbols
 // such as a function name in the module.
 // 1. http://llvm.org/bugs/show_bug.cgi?id=11479
 let mut llmod_id = link_meta.crateid.name.clone();

@@ -1527,7 +1527,7 @@ impl EnumMemberDescriptionFactory {
 // As far as debuginfo is concerned, the pointer this enum represents is still
 // wrapped in a struct. This is to make the DWARF representation of enums uniform.

-// First create a description of the artifical wrapper struct:
+// First create a description of the artificial wrapper struct:
 let non_null_variant = self.variants.get(non_null_variant_index as uint);
 let non_null_variant_ident = non_null_variant.name;
 let non_null_variant_name = token::get_ident(non_null_variant_ident);

@@ -204,7 +204,7 @@ impl FnStyleState {
 }

 /// Whether `check_binop` is part of an assignment or not.
-/// Used to know wether we allow user overloads and to print
+/// Used to know whether we allow user overloads and to print
 /// better messages on error.
 #[deriving(PartialEq)]
 enum IsBinopAssignment{

@@ -3702,7 +3702,7 @@ pub fn check_const_with_ty(fcx: &FnCtxt,
 e: &ast::Expr,
 declty: ty::t) {
 // Gather locals in statics (because of block expressions).
-// This is technically uneccessary because locals in static items are forbidden,
+// This is technically unnecessary because locals in static items are forbidden,
 // but prevents type checking from blowing up before const checking can properly
 // emit a error.
 GatherLocalsVisitor { fcx: fcx }.visit_expr(e, ());

@@ -4174,7 +4174,7 @@ pub fn instantiate_path(fcx: &FnCtxt,
 }
 None => {
 fcx.tcx().sess.span_bug(span,
-"missing default for a not explicitely provided type param")
+"missing default for a not explicitly provided type param")
 }
 }
 }

@@ -180,7 +180,7 @@ impl<'f> Coerce<'f> {
 self.unpack_actual_value(a, |sty_a| {
 match *sty_a {
 ty::ty_bare_fn(ref a_f) => {
-// Bare functions are coercable to any closure type.
+// Bare functions are coercible to any closure type.
 //
 // FIXME(#3320) this should go away and be
 // replaced with proper inference, got a patch

@@ -372,7 +372,7 @@ pub fn super_fn_sigs<C:Combine>(this: &C, a: &ty::FnSig, b: &ty::FnSig) -> cres<

 pub fn super_tys<C:Combine>(this: &C, a: ty::t, b: ty::t) -> cres<ty::t> {

-// This is a horible hack - historically, [T] was not treated as a type,
+// This is a horrible hack - historically, [T] was not treated as a type,
 // so, for example, &T and &[U] should not unify. In fact the only thing
 // &[U] should unify with is &[T]. We preserve that behaviour with this
 // check.

@@ -17,7 +17,7 @@ works, it often happens that errors are not detected until far after
 the relevant line of code has been type-checked. Therefore, there is
 an elaborate system to track why a particular constraint in the
 inference graph arose so that we can explain to the user what gave
-rise to a patricular error.
+rise to a particular error.

 The basis of the system are the "origin" types. An "origin" is the
 reason that a constraint or inference variable arose. There are

@@ -19,7 +19,7 @@
 * The code in here is defined quite generically so that it can be
 * applied both to type variables, which represent types being inferred,
 * and fn variables, which represent function types being inferred.
-* It may eventually be applied to ther types as well, who knows.
+* It may eventually be applied to their types as well, who knows.
 * In some cases, the functions are also generic with respect to the
 * operation on the lattice (GLB vs LUB).
 *

@@ -362,7 +362,7 @@ identify and remove strongly connected components (SCC) in the graph.
 Note that such components must consist solely of region variables; all
 of these variables can effectively be unified into a single variable.
 Once SCCs are removed, we are left with a DAG. At this point, we
-could walk the DAG in toplogical order once to compute the expanding
+could walk the DAG in topological order once to compute the expanding
 nodes, and again in reverse topological order to compute the
 contracting nodes. However, as I said, this does not work given the
 current treatment of closure bounds, but perhaps in the future we can

@@ -617,7 +617,7 @@ created to replace the bound regions in the input types, but it also
 contains 'intermediate' variables created to represent the LUB/GLB of
 individual regions. Basically, when asked to compute the LUB/GLB of a
 region variable with another region, the inferencer cannot oblige
-immediately since the valuese of that variables are not known.
+immediately since the values of that variables are not known.
 Therefore, it creates a new variable that is related to the two
 regions. For example, the LUB of two variables `$x` and `$y` is a
 fresh variable `$z` that is constrained such that `$x <= $z` and `$y

@@ -485,7 +485,7 @@ impl<'a> Visitor<()> for ConstraintContext<'a> {
 let variant =
 ty::VariantInfo::from_ast_variant(tcx,
 ast_variant,
-/*discrimant*/ 0);
+/*discriminant*/ 0);
 for &arg_ty in variant.args.iter() {
 self.add_constraints_from_ty(arg_ty, self.covariant);
 }

@@ -61,12 +61,12 @@ fn add_bytes_to_bits<T: Int + CheckedAdd + ToBits>(bits: T, bytes: T) -> T {
 let (new_high_bits, new_low_bits) = bytes.to_bits();

 if new_high_bits > Zero::zero() {
-fail!("numeric overflow occured.")
+fail!("numeric overflow occurred.")
 }

 match bits.checked_add(&new_low_bits) {
 Some(x) => return x,
-None => fail!("numeric overflow occured.")
+None => fail!("numeric overflow occurred.")
 }
 }

@@ -390,7 +390,7 @@ fn parse_lang_string(string: &str) -> (bool,bool,bool,bool) {
 }

 /// By default this markdown renderer generates anchors for each header in the
-/// rendered document. The anchor name is the contents of the header spearated
+/// rendered document. The anchor name is the contents of the header separated
 /// by hyphens, and a task-local map is used to disambiguate among duplicate
 /// headers (numbers are appended).
 ///

@@ -49,11 +49,11 @@ pub struct TocEntry {
 #[deriving(PartialEq)]
 pub struct TocBuilder {
 top_level: Toc,
-/// The current heirachy of parent headings, the levels are
+/// The current hierarchy of parent headings, the levels are
 /// strictly increasing (i.e. chain[0].level < chain[1].level <
-/// ...) with each entry being the most recent occurance of a
+/// ...) with each entry being the most recent occurrence of a
 /// heading with that level (it doesn't include the most recent
-/// occurences of every level, just, if *is* in `chain` then is is
+/// occurrences of every level, just, if *is* in `chain` then is is
 /// the most recent one).
 ///
 /// We also have `chain[0].level <= top_level.entries[last]`.

@@ -123,7 +123,7 @@ impl TocBuilder {
 }

 /// Push a level `level` heading into the appropriate place in the
-/// heirarchy, returning a string containing the section number in
+/// hierarchy, returning a string containing the section number in
 /// `<num>.<num>.<num>` format.
 pub fn push<'a>(&'a mut self, level: u32, name: String, id: String) -> &'a str {
 assert!(level >= 1);

@@ -383,7 +383,7 @@ mod unindent_tests {

 #[test]
 fn should_ignore_first_line_indent() {
-// Thi first line of the first paragraph may not be indented as
+// The first line of the first paragraph may not be indented as
 // far due to the way the doc string was written:
 //
 // #[doc = "Start way over here

@@ -163,7 +163,7 @@ impl Task {

 // Here we must unsafely borrow the task in order to not remove it from
 // TLS. When collecting failure, we may attempt to send on a channel (or
-// just run aribitrary code), so we must be sure to still have a local
+// just run arbitrary code), so we must be sure to still have a local
 // task in TLS.
 unsafe {
 let me: *mut Task = Local::unsafe_borrow();

@@ -395,7 +395,7 @@ pub fn begin_unwind<M: Any + Send>(msg: M, file: &'static str, line: uint) -> !
 /// The core of the unwinding.
 ///
 /// This is non-generic to avoid instantiation bloat in other crates
-/// (which makes compilation of small crates noticably slower). (Note:
+/// (which makes compilation of small crates noticeably slower). (Note:
 /// we need the `Any` object anyway, we're not just creating it to
 /// avoid being generic.)
 ///

@@ -408,7 +408,7 @@ fn begin_unwind_inner(msg: Box<Any:Send>,
 // First, invoke call the user-defined callbacks triggered on task failure.
 //
 // By the time that we see a callback has been registered (by reading
-// MAX_CALLBACKS), the actuall callback itself may have not been stored yet,
+// MAX_CALLBACKS), the actual callback itself may have not been stored yet,
 // so we just chalk it up to a race condition and move on to the next
 // callback. Additionally, CALLBACK_CNT may briefly be higher than
 // MAX_CALLBACKS, so we're sure to clamp it as necessary.

@@ -212,7 +212,7 @@ impl ForbidSwitch {
 impl Drop for ForbidSwitch {
 fn drop(&mut self) {
 assert!(self.io == homing::local_id(),
-"didnt want a scheduler switch: {}",
+"didn't want a scheduler switch: {}",
 self.msg);
 }
 }

@@ -147,7 +147,7 @@ impl StreamWatcher {
 // function is why that wording exists.
 //
 // Implementation-wise, we must be careful when passing a buffer down to
-// libuv. Most of this implementation avoids allocations becuase of the
+// libuv. Most of this implementation avoids allocations because of the
 // blocking guarantee (all stack local variables are valid for the
 // entire read/write request). If our write request can be timed out,
 // however, we must heap allocate the data and pass that to the libuv

@@ -164,7 +164,7 @@ impl StreamWatcher {
 };

 // Send off the request, but be careful to not block until we're sure
-// that the write reqeust is queued. If the reqeust couldn't be queued,
+// that the write request is queued. If the request couldn't be queued,
 // then we should return immediately with an error.
 match unsafe {
 uvll::uv_write(req.handle, self.handle, [uv_buf], write_cb)

@@ -542,7 +542,7 @@ impl<T: Send> Sender<T> {
 /// ```
 pub fn send_opt(&self, t: T) -> Result<(), T> {
 // In order to prevent starvation of other tasks in situations where
-// a task sends repeatedly without ever receiving, we occassionally
+// a task sends repeatedly without ever receiving, we occasionally
 // yield instead of doing a send immediately.
 //
 // Don't unconditionally attempt to yield because the TLS overhead can

@@ -513,7 +513,7 @@ mod tests {
 #[test]
 fn test_mutex_arc_nested() {
 // Tests nested mutexes and access
-// to underlaying data.
+// to underlying data.
 let arc = Arc::new(Mutex::new(1));
 let arc2 = Arc::new(Mutex::new(arc));
 task::spawn(proc() {

@@ -71,7 +71,7 @@ impl Once {

 // Implementation-wise, this would seem like a fairly trivial primitive.
 // The stickler part is where our mutexes currently require an
-// allocation, and usage of a `Once` should't leak this allocation.
+// allocation, and usage of a `Once` shouldn't leak this allocation.
 //
 // This means that there must be a deterministic destroyer of the mutex
 // contained within (because it's not needed after the initialization

@@ -195,7 +195,7 @@ impl DummyResult {
 /// Create a default MacResult that can only be an expression.
 ///
 /// Use this for macros that must expand to an expression, so even
-/// if an error is encountered internally, the user will recieve
+/// if an error is encountered internally, the user will receive
 /// an error that they also used it in the wrong place.
 pub fn expr(sp: Span) -> Box<MacResult> {
 box DummyResult { expr_only: true, span: sp } as Box<MacResult>

@@ -77,7 +77,7 @@ fn cs_op(less: bool, equal: bool, cx: &mut ExtCtxt, span: Span, substr: &Substru
 ```

 The optimiser should remove the redundancy. We explicitly
-get use the binops to avoid auto-deref derefencing too many
+get use the binops to avoid auto-deref dereferencing too many
 layers of pointers, if the type includes pointers.
 */
 let other_f = match other_fs {

@@ -723,7 +723,7 @@ impl<'a> MethodDef<'a> {
 &Struct(fields));

 // make a series of nested matches, to destructure the
-// structs. This is actually right-to-left, but it shoudn't
+// structs. This is actually right-to-left, but it shouldn't
 // matter.
 for (&arg_expr, &pat) in self_args.iter().zip(patterns.iter()) {
 body = cx.expr_match(trait_.span, arg_expr,

@@ -58,7 +58,7 @@ pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler,
 dotdotdoted: false,
 sep: None,
 }),
-interpolations: match interp { /* just a convienience */
+interpolations: match interp { /* just a convenience */
 None => HashMap::new(),
 Some(x) => x,
 },

@@ -112,7 +112,7 @@ impl<'a> ParserAttr for Parser<'a> {
 }

 // Parse attributes that appear after the opening of an item. These should
-// be preceded by an exclaimation mark, but we accept and warn about one
+// be preceded by an exclamation mark, but we accept and warn about one
 // terminated by a semicolon. In addition to a vector of inner attributes,
 // this function also returns a vector that may contain the first outer
 // attribute of the next item (since we can't know whether the attribute

@@ -2582,7 +2582,7 @@ impl<'a> Parser<'a> {
 self.mk_expr(lo, hi, ExprLoop(body, opt_ident))
 }

-// For distingishing between struct literals and blocks
+// For distinguishing between struct literals and blocks
 fn looking_at_struct_literal(&mut self) -> bool {
 self.token == token::LBRACE &&
 ((self.look_ahead(1, |t| token::is_plain_ident(t)) &&

@@ -233,7 +233,7 @@ pub fn mk_printer(out: Box<io::Writer>, linewidth: uint) -> Printer {
 *
 * There is a parallel ring buffer, 'size', that holds the calculated size of
 * each token. Why calculated? Because for Begin/End pairs, the "size"
-* includes everything betwen the pair. That is, the "size" of Begin is
+* includes everything between the pair. That is, the "size" of Begin is
 * actually the sum of the sizes of everything between Begin and the paired
 * End that follows. Since that is arbitrarily far in the future, 'size' is
 * being rewritten regularly while the printer runs; in fact most of the

@@ -434,7 +434,7 @@ impl Printer {
 assert!((self.right != self.left));
 }
 pub fn advance_left(&mut self, x: Token, l: int) -> io::IoResult<()> {
-debug!("advnce_left ~[{},{}], sizeof({})={}", self.left, self.right,
+debug!("advance_left ~[{},{}], sizeof({})={}", self.left, self.right,
 self.left, l);
 if l >= 0 {
 let ret = self.print(x.clone(), l);

@@ -62,7 +62,7 @@ pub fn generics_of_fn(fk: &FnKind) -> Generics {
 }

 /// Each method of the Visitor trait is a hook to be potentially
-/// overriden. Each method's default implementation recursively visits
+/// overridden. Each method's default implementation recursively visits
 /// the substructure of the input via the corresponding `walk` method;
 /// e.g. the `visit_mod` method by default calls `visit::walk_mod`.
 ///

@@ -32,7 +32,7 @@ pub fn get_dbpath_for_term(term: &str) -> Option<Box<Path>> {
 Some(dir) => dirs_to_search.push(Path::new(dir)),
 None => {
 if homedir.is_some() {
-// ncurses compatability;
+// ncurses compatibility;
 dirs_to_search.push(homedir.unwrap().join(".terminfo"))
 }
 match getenv("TERMINFO_DIRS") {

@@ -462,7 +462,7 @@ impl FromStr for Uuid {
 /// Parse a hex string and interpret as a UUID
 ///
 /// Accepted formats are a sequence of 32 hexadecimal characters,
-/// with or without hypens (grouped as 8, 4, 4, 4, 12).
+/// with or without hyphens (grouped as 8, 4, 4, 4, 12).
 fn from_str(us: &str) -> Option<Uuid> {
 let result = Uuid::parse_string(us);
 match result {

@@ -492,7 +492,7 @@ impl Eq for Uuid {}

 // FIXME #9845: Test these more thoroughly
 impl<T: Encoder<E>, E> Encodable<T, E> for Uuid {
-/// Encode a UUID as a hypenated string
+/// Encode a UUID as a hyphenated string
 fn encode(&self, e: &mut T) -> Result<(), E> {
 e.emit_str(self.to_hyphenated_str().as_slice())
 }