fix various typos in doc comments
parent 0195812aea
commit 4e35cbb22e
49 changed files with 68 additions and 68 deletions
@@ -69,7 +69,7 @@ struct LeafNode<K, V> {
 
 /// This node's index into the parent node's `edges` array.
 /// `*node.parent.edges[node.parent_idx]` should be the same thing as `node`.
-/// This is only guaranteed to be initialized when `parent` is nonnull.
+/// This is only guaranteed to be initialized when `parent` is non-null.
 parent_idx: MaybeUninit<u16>,
 
 /// The number of keys and values this node stores.
@@ -44,7 +44,7 @@ use boxed::Box;
 /// This enables you to use capacity growing logic catch the overflows in your length
 /// that might occur with zero-sized types.
 ///
-/// However this means that you need to be careful when roundtripping this type
+/// However this means that you need to be careful when round-tripping this type
 /// with a `Box<[T]>`: `cap()` won't yield the len. However `with_capacity`,
 /// `shrink_to_fit`, and `from_box` will actually set RawVec's private capacity
 /// field. This allows zero-sized types to not be special-cased by consumers of
@@ -445,7 +445,7 @@ impl f32 {
 /// signaling NaNs on MIPS are quiet NaNs on x86, and vice-versa.
 ///
 /// Rather than trying to preserve signaling-ness cross-platform, this
-/// implementation favours preserving the exact bits. This means that
+/// implementation favors preserving the exact bits. This means that
 /// any payloads encoded in NaNs will be preserved even if the result of
 /// this method is sent over the network from an x86 machine to a MIPS one.
 ///
@@ -108,7 +108,7 @@ impl Drop for Waker {
 /// is ready to be run.
 ///
 /// This is similar to the `Waker` type, but cannot be sent across threads.
-/// Task executors can use this type to implement more optimized singlethreaded wakeup
+/// Task executors can use this type to implement more optimized single-threaded wakeup
 /// behavior.
 #[repr(transparent)]
 #[derive(Clone)]
@@ -535,7 +535,7 @@ impl TokenTree {
 }
 }
 
-/// Prints token treee in a form convenient for debugging.
+/// Prints token tree in a form convenient for debugging.
 #[stable(feature = "proc_macro_lib2", since = "1.29.0")]
 impl fmt::Debug for TokenTree {
 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
@@ -730,7 +730,7 @@ impl fmt::Debug for Group {
 
 /// An `Punct` is an single punctuation character like `+`, `-` or `#`.
 ///
-/// Multicharacter operators like `+=` are represented as two instances of `Punct` with different
+/// Multi-character operators like `+=` are represented as two instances of `Punct` with different
 /// forms of `Spacing` returned.
 #[stable(feature = "proc_macro_lib2", since = "1.29.0")]
 #[derive(Clone)]
@@ -788,7 +788,7 @@ impl Punct {
 
 /// Returns the spacing of this punctuation character, indicating whether it's immediately
 /// followed by another `Punct` in the token stream, so they can potentially be combined into
-/// a multicharacter operator (`Joint`), or it's followed by some other token or whitespace
+/// a multi-character operator (`Joint`), or it's followed by some other token or whitespace
 /// (`Alone`) so the operator has certainly ended.
 #[stable(feature = "proc_macro_lib2", since = "1.29.0")]
 pub fn spacing(&self) -> Spacing {
@@ -947,7 +947,7 @@ macro_rules! suffixed_int_literals {
 /// This function will create an integer like `1u32` where the integer
 /// value specified is the first part of the token and the integral is
 /// also suffixed at the end.
-/// Literals created from negative numbers may not survive rountrips through
+/// Literals created from negative numbers may not survive round-trips through
 /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
 ///
 /// Literals created through this method have the `Span::call_site()`
@@ -1047,7 +1047,7 @@ impl Literal {
 
 /// Creates a new suffixed floating-point literal.
 ///
-/// This consturctor will create a literal like `1.0f32` where the value
+/// This constructor will create a literal like `1.0f32` where the value
 /// specified is the preceding part of the token and `f32` is the suffix of
 /// the token. This token will always be inferred to be an `f32` in the
 /// compiler.
@@ -1096,7 +1096,7 @@ impl Literal {
 
 /// Creates a new suffixed floating-point literal.
 ///
-/// This consturctor will create a literal like `1.0f64` where the value
+/// This constructor will create a literal like `1.0f64` where the value
 /// specified is the preceding part of the token and `f64` is the suffix of
 /// the token. This token will always be inferred to be an `f64` in the
 /// compiler.
@@ -9,7 +9,7 @@
 // except according to those terms.
 
 //! Some facilities for tracking how codegen-units are reused during incremental
-//! compilition. This is used for incremental compiliation tests and debug
+//! compilation. This is used for incremental compilation tests and debug
 //! output.
 
 use session::Session;
@@ -36,7 +36,7 @@ pub enum NonMacroAttrKind {
 Tool,
 /// Single-segment custom attribute registered by a derive macro (`#[serde(default)]`).
 DeriveHelper,
-/// Single-segment custom attriubte registered by a legacy plugin (`register_attribute`).
+/// Single-segment custom attribute registered by a legacy plugin (`register_attribute`).
 LegacyPluginHelper,
 /// Single-segment custom attribute not registered in any way (`#[my_attr]`).
 Custom,
@@ -10,7 +10,7 @@
 
 //! This module contains the "canonicalizer" itself.
 //!
-//! For an overview of what canonicaliation is and how it fits into
+//! For an overview of what canonicalization is and how it fits into
 //! rustc, check out the [chapter in the rustc guide][c].
 //!
 //! [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html
@@ -556,7 +556,7 @@ impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
 }
 
 /// Given two sets of values for the same set of canonical variables, unify them.
-/// The second set is produced lazilly by supplying indices from the first set.
+/// The second set is produced lazily by supplying indices from the first set.
 fn unify_canonical_vars(
 &self,
 cause: &ObligationCause<'tcx>,
@@ -11,7 +11,7 @@
 //! This module contains code to substitute new values into a
 //! `Canonical<'tcx, T>`.
 //!
-//! For an overview of what canonicaliation is and how it fits into
+//! For an overview of what canonicalization is and how it fits into
 //! rustc, check out the [chapter in the rustc guide][c].
 //!
 //! [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html
@@ -20,7 +20,7 @@ use util::common::ErrorReported;
 use infer::lexical_region_resolve::RegionResolutionError::SubSupConflict;
 
 impl<'a, 'gcx, 'tcx> NiceRegionError<'a, 'gcx, 'tcx> {
-/// Print the error message for lifetime errors when binding excapes a closure.
+/// Print the error message for lifetime errors when binding escapes a closure.
 ///
 /// Consider a case where we have
 ///
@@ -428,7 +428,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
 ///
 /// This routine is only intended to be used when the leak-check has
 /// passed; currently, it's used in the trait matching code to create
-/// a set of nested obligations frmo an impl that matches against
+/// a set of nested obligations from an impl that matches against
 /// something higher-ranked. More details can be found in
 /// `librustc/middle/traits/README.md`.
 ///
@@ -1160,10 +1160,10 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
 }
 
 /// Takes ownership of the list of variable regions. This implies
-/// that all the region constriants have already been taken, and
+/// that all the region constraints have already been taken, and
 /// hence that `resolve_regions_and_report_errors` can never be
 /// called. This is used only during NLL processing to "hand off" ownership
-/// of the set of region vairables into the NLL region context.
+/// of the set of region variables into the NLL region context.
 pub fn take_region_var_origins(&self) -> VarInfos {
 let (var_infos, data) = self.region_constraints
 .borrow_mut()
@@ -1478,7 +1478,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
 }
 
 /// Clears the selection, evaluation, and projection caches. This is useful when
-/// repeatedly attemping to select an Obligation while changing only
+/// repeatedly attempting to select an Obligation while changing only
 /// its ParamEnv, since FulfillmentContext doesn't use 'probe'
 pub fn clear_caches(&self) {
 self.selection_cache.clear();
@@ -320,7 +320,7 @@ impl<'tcx> TypeVariableTable<'tcx> {
 /// but which have only been unified since `s` started, and
 /// return the types with which they were unified. So if we had
 /// a type variable `V0`, then we started the snapshot, then we
-/// created a type variable `V1`, unifed `V0` with `T0`, and
+/// created a type variable `V1`, unified `V0` with `T0`, and
 /// unified `V1` with `T1`, this function would return `{T0}`.
 pub fn types_escaping_snapshot(&mut self, s: &Snapshot<'tcx>) -> Vec<Ty<'tcx>> {
 let mut new_elem_threshold = u32::MAX;
@@ -189,7 +189,7 @@ impl<'a> LintLevelsBuilder<'a> {
 /// This function will perform a number of tasks:
 ///
 /// * It'll validate all lint-related attributes in `attrs`
-/// * It'll mark all lint-related attriutes as used
+/// * It'll mark all lint-related attributes as used
 /// * Lint levels will be updated based on the attributes provided
 /// * Lint attributes are validated, e.g. a #[forbid] can't be switched to
 /// #[allow]
@@ -224,7 +224,7 @@ impl Default for ErrorOutputType {
 
 // Use tree-based collections to cheaply get a deterministic Hash implementation.
 // DO NOT switch BTreeMap out for an unsorted container type! That would break
-// dependency tracking for commandline arguments.
+// dependency tracking for command-line arguments.
 #[derive(Clone, Hash)]
 pub struct OutputTypes(BTreeMap<OutputType, Option<PathBuf>>);
 
@@ -273,7 +273,7 @@ impl OutputTypes {
 
 // Use tree-based collections to cheaply get a deterministic Hash implementation.
 // DO NOT switch BTreeMap or BTreeSet out for an unsorted container type! That
-// would break dependency tracking for commandline arguments.
+// would break dependency tracking for command-line arguments.
 #[derive(Clone, Hash)]
 pub struct Externs(BTreeMap<String, BTreeSet<Option<String>>>);
 
@@ -339,7 +339,7 @@ macro_rules! top_level_options {
 );
 }
 
-// The top-level commandline options struct
+// The top-level command-line options struct
 //
 // For each option, one has to specify how it behaves with regard to the
 // dependency tracking system of incremental compilation. This is done via the
@@ -2377,11 +2377,11 @@ impl fmt::Display for CrateType {
 }
 }
 
-/// Commandline arguments passed to the compiler have to be incorporated with
+/// Command-line arguments passed to the compiler have to be incorporated with
 /// the dependency tracking system for incremental compilation. This module
 /// provides some utilities to make this more convenient.
 ///
-/// The values of all commandline arguments that are relevant for dependency
+/// The values of all command-line arguments that are relevant for dependency
 /// tracking are hashed into a single value that determines whether the
 /// incremental compilation cache can be re-used or not. This hashing is done
 /// via the DepTrackingHash trait defined below, since the standard Hash
@@ -2394,7 +2394,7 @@ impl fmt::Display for CrateType {
 /// impl_dep_tracking_hash_via_hash!() macro that allows to simply reuse the
 /// Hash implementation for DepTrackingHash. It's important though that
 /// we have an opt-in scheme here, so one is hopefully forced to think about
-/// how the hash should be calculated when adding a new commandline argument.
+/// how the hash should be calculated when adding a new command-line argument.
 mod dep_tracking {
 use lint;
 use middle::cstore;
@@ -200,7 +200,7 @@ impl_stable_hash_for!(struct DtorckConstraint<'tcx> {
 /// trivial for dropck-outlives.
 ///
 /// Note also that `needs_drop` requires a "global" type (i.e., one
-/// with erased regions), but this funtcion does not.
+/// with erased regions), but this function does not.
 pub fn trivial_dropck_outlives<'tcx>(tcx: TyCtxt<'_, '_, 'tcx>, ty: Ty<'tcx>) -> bool {
 match ty.sty {
 // None of these types have a destructor and hence they do not
@@ -783,7 +783,7 @@ pub fn shift_vars<'a, 'gcx, 'tcx, T>(
 ///
 /// Note that what I'm calling an "escaping var" is often just called a "free var". However,
 /// we already use the term "free var". It refers to the regions or types that we use to represent
-/// bound regions or type params on a fn definition while we are typechecking its body.
+/// bound regions or type params on a fn definition while we are type checking its body.
 ///
 /// To clarify, conceptually there is no particular difference between
 /// an "escaping" var and a "free" var. However, there is a big
@@ -859,7 +859,7 @@ struct LateBoundRegionsCollector {
 
 /// If true, we only want regions that are known to be
 /// "constrained" when you equate this type with another type. In
-/// partcular, if you have e.g. `&'a u32` and `&'b u32`, equating
+/// particular, if you have e.g. `&'a u32` and `&'b u32`, equating
 /// them constraints `'a == 'b`. But if you have `<&'a u32 as
 /// Trait>::Foo` and `<&'b u32 as Trait>::Foo`, normalizing those
 /// types may mean that `'a` and `'b` don't appear in the results,
@@ -1279,7 +1279,7 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> {
 /// Type size "skeleton", i.e. the only information determining a type's size.
 /// While this is conservative, (aside from constant sizes, only pointers,
 /// newtypes thereof and null pointer optimized enums are allowed), it is
-/// enough to statically check common usecases of transmute.
+/// enough to statically check common use cases of transmute.
 #[derive(Copy, Clone, Debug)]
 pub enum SizeSkeleton<'tcx> {
 /// Any statically computable Layout.
@@ -2394,7 +2394,7 @@ impl<'a, 'gcx, 'tcx> FieldDef {
 
 /// Represents the various closure traits in the Rust language. This
 /// will determine the type of the environment (`self`, in the
-/// desuaring) argument that the closure expects.
+/// desugaring) argument that the closure expects.
 ///
 /// You can get the environment type of a closure using
 /// `tcx.closure_env_ty()`.
@@ -941,7 +941,7 @@ pub fn codegen_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
 ///
 /// In the process of working on rust-lang/rust#55238 a mysterious segfault was
 /// stumbled upon. The segfault was never reproduced locally, but it was
-/// suspected to be releated to the fact that codegen worker threads were
+/// suspected to be related to the fact that codegen worker threads were
 /// sticking around by the time the main thread was exiting, causing issues.
 ///
 /// This structure is an attempt to fix that issue where the `codegen_aborted`
@@ -184,7 +184,7 @@ const WASM_WHITELIST: &[(&str, Option<&str>)] = &[
 ];
 
 /// When rustdoc is running, provide a list of all known features so that all their respective
-/// primtives may be documented.
+/// primitives may be documented.
 ///
 /// IMPORTANT: If you're adding another whitelist to the above lists, make sure to add it to this
 /// iterator!
@@ -79,7 +79,7 @@
 //! - In order to be able to also use symbols from two versions of the same
 //! crate (which naturally also have the same name), a stronger measure is
 //! required: The compiler accepts an arbitrary "disambiguator" value via the
-//! `-C metadata` commandline argument. This disambiguator is then fed into
+//! `-C metadata` command-line argument. This disambiguator is then fed into
 //! the symbol hash of every exported item. Consequently, the symbols in two
 //! identical crates but with different disambiguators are not in conflict
 //! with each other. This facility is mainly intended to be used by build
@@ -38,7 +38,7 @@ struct SccData<S: Idx> {
 /// successors can be found.
 ranges: IndexVec<S, Range<usize>>,
 
-/// Contains the succcessors for all the Sccs, concatenated. The
+/// Contains the successors for all the Sccs, concatenated. The
 /// range of indices corresponding to a given SCC is found in its
 /// SccData.
 all_successors: Vec<S>,
@@ -452,7 +452,7 @@ impl<O, T: ?Sized> OwningRef<O, T> {
 /// use owning_ref::{OwningRef, Erased};
 ///
 /// fn main() {
-/// // NB: Using the concrete types here for explicitnes.
+/// // NB: Using the concrete types here for explicitness.
 /// // For less verbose code type aliases like `BoxRef` are provided.
 ///
 /// let owning_ref_a: OwningRef<Box<[i32; 4]>, [i32; 4]>
@@ -722,7 +722,7 @@ impl<O, T: ?Sized> OwningRefMut<O, T> {
 /// use owning_ref::{OwningRefMut, Erased};
 ///
 /// fn main() {
-/// // NB: Using the concrete types here for explicitnes.
+/// // NB: Using the concrete types here for explicitness.
 /// // For less verbose code type aliases like `BoxRef` are provided.
 ///
 /// let owning_ref_mut_a: OwningRefMut<Box<[i32; 4]>, [i32; 4]>
@@ -15,7 +15,7 @@ use std::mem;
 use std::ops::{RangeBounds, Bound, Index, IndexMut};
 
 /// `SortedMap` is a data structure with similar characteristics as BTreeMap but
-/// slightly different trade-offs: lookup, inseration, and removal are O(log(N))
+/// slightly different trade-offs: lookup, insertion, and removal are O(log(N))
 /// and elements can be iterated in order cheaply.
 ///
 /// `SortedMap` can be faster than a `BTreeMap` for small sizes (<50) since it
@@ -643,8 +643,8 @@ impl Compilation {
 }
 }
 
-/// A trait for customising the compilation process. Offers a number of hooks for
-/// executing custom code or customising input.
+/// A trait for customizing the compilation process. Offers a number of hooks for
+/// executing custom code or customizing input.
 pub trait CompilerCalls<'a> {
 /// Hook for a callback early in the process of handling arguments. This will
 /// be called straight after options have been parsed but before anything
@@ -160,7 +160,7 @@ const LABELS_FN_IN_TRAIT: &[&[&str]] = &[
 EXTRA_TRAIT,
 ];
 
-/// For generic cases like inline-assemply/mod/etc
+/// For generic cases like inline-assembly/mod/etc
 const LABELS_HIR_ONLY: &[&[&str]] = &[
 BASE_HIR,
 ];
@@ -1486,7 +1486,7 @@ declare_lint! {
 "detects edition keywords being used as an identifier"
 }
 
-/// Checks for uses of edtion keywords used as an identifier
+/// Checks for uses of edition keywords used as an identifier
 #[derive(Clone)]
 pub struct KeywordIdents;
 
@@ -111,7 +111,7 @@ mod relate_tys;
 /// - `liveness` -- results of a liveness computation on the MIR; used to create liveness
 /// constraints for the regions in the types of variables
 /// - `flow_inits` -- results of a maybe-init dataflow analysis
-/// - `move_data` -- move-data constructed when performing the maybe-init dataflow analysiss
+/// - `move_data` -- move-data constructed when performing the maybe-init dataflow analysis
 pub(crate) fn type_check<'gcx, 'tcx>(
 infcx: &InferCtxt<'_, 'gcx, 'tcx>,
 param_env: ty::ParamEnv<'gcx>,
@@ -262,7 +262,7 @@ struct PlaceComponents<'p, 'tcx: 'p> {
 impl<'p, 'tcx> PlaceComponents<'p, 'tcx> {
 /// Converts a list of `Place` components into an iterator; this
 /// iterator yields up a never-ending stream of `Option<&Place>`.
-/// These begin with the "innermst" place and then with each
+/// These begin with the "innermost" place and then with each
 /// projection therefrom. So given a place like `a.b.c` it would
 /// yield up:
 ///
@@ -30,7 +30,7 @@ use std::cmp::Ordering;
 impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
 /// Identifies what test is needed to decide if `match_pair` is applicable.
 ///
-/// It is a bug to call this with a simplifyable pattern.
+/// It is a bug to call this with a simplifiable pattern.
 pub fn test<'pat>(&mut self, match_pair: &MatchPair<'pat, 'tcx>) -> Test<'tcx> {
 match *match_pair.pattern.kind {
 PatternKind::Variant { ref adt_def, substs: _, variant_index: _, subpatterns: _ } => {
@@ -115,7 +115,7 @@ pub enum StmtKind<'tcx> {
 /// reference to an expression in this enum is an `ExprRef<'tcx>`, which
 /// may in turn be another instance of this enum (boxed), or else an
 /// unlowered `&'tcx H::Expr`. Note that instances of `Expr` are very
-/// shortlived. They are created by `Hair::to_expr`, analyzed and
+/// short-lived. They are created by `Hair::to_expr`, analyzed and
 /// converted into MIR, and then discarded.
 ///
 /// If you compare `Expr` to the full compiler AST, you will see it is
@@ -55,11 +55,11 @@
 /// all the values it covers are already covered by row 2.
 ///
 /// To compute `U`, we must have two other concepts.
-/// 1. `S(c, P)` is a "specialised matrix", where `c` is a constructor (like `Some` or
+/// 1. `S(c, P)` is a "specialized matrix", where `c` is a constructor (like `Some` or
 /// `None`). You can think of it as filtering `P` to just the rows whose *first* pattern
 /// can cover `c` (and expanding OR-patterns into distinct patterns), and then expanding
 /// the constructor into all of its components.
-/// The specialisation of a row vector is computed by `specialize`.
+/// The specialization of a row vector is computed by `specialize`.
 ///
 /// It is computed as follows. For each row `p_i` of P, we have four cases:
 /// 1.1. `p_(i,1) = c(r_1, .., r_a)`. Then `S(c, P)` has a corresponding row:
@@ -1453,7 +1453,7 @@ fn should_treat_range_exhaustively(tcx: TyCtxt<'_, 'tcx, 'tcx>, ctor: &Construct
 /// mean creating a separate constructor for every single value in the range, which is clearly
 /// impractical. However, observe that for some ranges of integers, the specialisation will be
 /// identical across all values in that range (i.e. there are equivalence classes of ranges of
-/// constructors based on their `is_useful_specialised` outcome). These classes are grouped by
+/// constructors based on their `is_useful_specialized` outcome). These classes are grouped by
 /// the patterns that apply to them (in the matrix `P`). We can split the range whenever the
 /// patterns that apply to that range (specifically: the patterns that *intersect* with that range)
 /// change.
@@ -67,7 +67,7 @@ macro_rules! try_validation {
 }}
 }
 
-/// We want to show a nice path to the invalid field for diagnotsics,
+/// We want to show a nice path to the invalid field for diagnostics,
 /// but avoid string operations in the happy case where no error happens.
 /// So we track a `Vec<PathElem>` where `PathElem` contains all the data we
 /// need to later print something for the user.
@@ -172,7 +172,7 @@ impl<'a, 'gcx> CheckCrateVisitor<'a, 'gcx> {
 /// While the `ExprUseVisitor` walks, we will identify which
 /// expressions are borrowed, and insert their ids into this
 /// table. Actually, we insert the "borrow-id", which is normally
-/// the id of the expession being borrowed: but in the case of
+/// the id of the expression being borrowed: but in the case of
 /// `ref mut` borrows, the `id` of the pattern is
 /// inserted. Therefore later we remove that entry from the table
 /// and transfer it over to the value being matched. This will
@@ -1437,7 +1437,7 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> {
 ///
 /// 1. Because the numbers of the region variables would otherwise be fairly unique to this
 /// particular method call, it winds up creating fewer types overall, which helps for memory
-/// usage. (Admittedly, this is a rather small effect, though measureable.)
+/// usage. (Admittedly, this is a rather small effect, though measurable.)
 ///
 /// 2. It makes it easier to deal with higher-ranked trait bounds, because we can replace any
 /// late-bound regions with 'static. Otherwise, if we were going to replace late-bound
@@ -187,7 +187,7 @@ pub struct RenderOptions {
 /// Whether to generate a table of contents on the output file when reading a standalone
 /// Markdown file.
 pub markdown_no_toc: bool,
-/// Additional CSS files to link in pages generated from standlone Markdown files.
+/// Additional CSS files to link in pages generated from standalone Markdown files.
 pub markdown_css: Vec<String>,
 /// If present, playground URL to use in the "Run" button added to code samples generated from
 /// standalone Markdown files. If not present, `playground_url` is used.
@@ -85,7 +85,7 @@ pub use alloc_crate::alloc::*;
 /// This is based on `malloc` on Unix platforms and `HeapAlloc` on Windows,
 /// plus related functions.
 ///
-/// This type implements the `GlobalAlloc` trait and Rust programs by deafult
+/// This type implements the `GlobalAlloc` trait and Rust programs by default
 /// work as if they had this definition:
 ///
 /// ```rust
@@ -1569,7 +1569,7 @@ impl<K, V, S> HashMap<K, V, S>
 /// where the key should go, meaning the keys may become "lost" if their
 /// location does not reflect their state. For instance, if you change a key
 /// so that the map now contains keys which compare equal, search may start
-/// acting eratically, with two keys randomly masking eachother. Implementations
+/// acting erratically, with two keys randomly masking each other. Implementations
 /// are free to assume this doesn't happen (within the limits of memory-safety).
 #[unstable(feature = "hash_raw_entry", issue = "54043")]
 pub fn raw_entry_mut(&mut self) -> RawEntryBuilderMut<K, V, S> {
@@ -615,7 +615,7 @@ mod loop_keyword { }
 //
 /// The keyword used to define structs.
 ///
-/// Structs in Rust come in three flavours: Structs with named fields, tuple structs, and unit
+/// Structs in Rust come in three flavors: Structs with named fields, tuple structs, and unit
 /// structs.
 ///
 /// ```rust
@@ -852,7 +852,7 @@ impl From<[u8; 4]> for IpAddr {
 impl Ipv6Addr {
 /// Creates a new IPv6 address from eight 16-bit segments.
 ///
-/// The result will represent the IP address a:b:c:d:e:f:g:h.
+/// The result will represent the IP address `a:b:c:d:e:f:g:h`.
 ///
 /// # Examples
 ///
@@ -1119,7 +1119,7 @@ impl From<fs::File> for Stdio {
 /// let file = File::open("foo.txt").unwrap();
 ///
 /// let reverse = Command::new("rev")
-/// .stdin(file) // Implicit File convertion into a Stdio
+/// .stdin(file) // Implicit File conversion into a Stdio
 /// .output()
 /// .expect("failed reverse command");
 ///
@@ -1337,7 +1337,7 @@ impl Child {
 /// Attempts to collect the exit status of the child if it has already
 /// exited.
 ///
-/// This function will not block the calling thread and will only advisorily
+/// This function will not block the calling thread and will only
 /// check to see if the child process has exited or not. If the child has
 /// exited then on Unix the process id is reaped. This function is
 /// guaranteed to repeatedly return a successful exit status so long as the
@@ -26,7 +26,7 @@ impl FileDesc {
 
 pub fn raw(&self) -> usize { self.fd }
 
-/// Extracts the actual filedescriptor without closing it.
+/// Extracts the actual file descriptor without closing it.
 pub fn into_raw(self) -> usize {
 let fd = self.fd;
 mem::forget(self);
@@ -45,7 +45,7 @@ pub unsafe fn brk(addr: usize) -> Result<usize> {
 /// # Errors
 ///
 /// * `EACCES` - permission is denied for one of the components of `path`, or `path`
-/// * `EFAULT` - `path` does not point to the process's addressible memory
+/// * `EFAULT` - `path` does not point to the process's addressable memory
 /// * `EIO` - an I/O error occurred
 /// * `ENOENT` - `path` does not exit
 /// * `ENOTDIR` - `path` is not a directory
@@ -347,7 +347,7 @@ pub fn waitpid(pid: usize, status: &mut usize, options: usize) -> Result<usize>
 ///
 /// * `EAGAIN` - the file descriptor was opened with `O_NONBLOCK` and writing would block
 /// * `EBADF` - the file descriptor is not valid or is not open for writing
-/// * `EFAULT` - `buf` does not point to the process's addressible memory
+/// * `EFAULT` - `buf` does not point to the process's addressable memory
 /// * `EIO` - an I/O error occurred
 /// * `ENOSPC` - the device containing the file descriptor has no room for data
 /// * `EPIPE` - the file descriptor refers to a pipe or socket whose reading end is closed
@@ -46,7 +46,7 @@ impl FileDesc {
 
 pub fn raw(&self) -> c_int { self.fd }
 
-/// Extracts the actual filedescriptor without closing it.
+/// Extracts the actual file descriptor without closing it.
 pub fn into_raw(self) -> c_int {
 let fd = self.fd;
 mem::forget(self);
@@ -233,7 +233,7 @@ pub use self::local::{LocalKey, AccessError};
 ///
 /// You may want to use [`spawn`] instead of [`thread::spawn`], when you want
 /// to recover from a failure to launch a thread, indeed the free function will
-/// panick where the `Builder` method will return a [`io::Result`].
+/// panic where the `Builder` method will return a [`io::Result`].
 ///
 /// # Examples
 ///
@@ -150,7 +150,7 @@ impl TokenTree {
 }
 }
 
-/// Modify the `TokenTree`'s span inplace.
+/// Modify the `TokenTree`'s span in-place.
 pub fn set_span(&mut self, span: Span) {
 match *self {
 TokenTree::Token(ref mut sp, _) => *sp = span,
@@ -68,7 +68,7 @@
 //! The `i32`s in `B` and `C0` don't have an identifier, so the
 //! `Option<ident>`s would be `None` for them.
 //!
-//! In the static cases, the structure is summarised, either into the just
+//! In the static cases, the structure is summarized, either into the just
 //! spans of the fields or a list of spans and the field idents (for tuple
 //! structs and record structs, respectively), or a list of these, for
 //! enums (one for each variant). For empty struct and empty enum