Merge from rustc
commit 3942cdf1bf
535 changed files with 2707 additions and 2616 deletions

@@ -3250,6 +3250,7 @@ dependencies = [
"tempfile",
"thorin-dwp",
"tracing",
"windows 0.46.0",
]

[[package]]

@@ -3641,6 +3642,7 @@ dependencies = [
"rustc_plugin_impl",
"rustc_privacy",
"rustc_query_impl",
"rustc_query_system",
"rustc_resolve",
"rustc_session",
"rustc_span",

@@ -3770,6 +3772,7 @@ dependencies = [
"derive_more",
"either",
"gsgdt",
"measureme",
"polonius-engine",
"rustc-rayon",
"rustc-rayon-core",

@@ -1430,8 +1430,8 @@ pub enum ExprKind {
/// The async block used to have a `NodeId`, which was removed in favor of
/// using the parent `NodeId` of the parent `Expr`.
Async(CaptureBy, P<Block>),
/// An await expression (`my_future.await`).
Await(P<Expr>),
/// An await expression (`my_future.await`). Span is of await keyword.
Await(P<Expr>, Span),

/// A try block (`try { ... }`).
TryBlock(P<Block>),

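Note (not part of the diff): a minimal, self-contained analogue of this AST change, using hypothetical stand-in types rather than rustc's real `P`, `Expr`, and `Span`, to show how code matching the old one-field `Await` variant picks up the extra keyword span:

// Hypothetical stand-ins for illustration only.
struct Span { lo: u32, hi: u32 }
struct Expr;

enum ExprKind {
    // Before: Await(Box<Expr>)
    // After: the span of the `await` keyword travels with the node.
    Await(Box<Expr>, Span),
}

fn describe(kind: &ExprKind) -> String {
    match kind {
        // Downstream matches now destructure (and may ignore) the second field.
        ExprKind::Await(_expr, kw_span) => {
            format!("await keyword at {}..{}", kw_span.lo, kw_span.hi)
        }
    }
}

fn main() {
    let k = ExprKind::Await(Box::new(Expr), Span { lo: 10, hi: 15 });
    println!("{}", describe(&k));
}
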
@@ -1415,7 +1415,10 @@ pub fn noop_visit_expr<T: MutVisitor>(
ExprKind::Async(_capture_by, body) => {
vis.visit_block(body);
}
ExprKind::Await(expr) => vis.visit_expr(expr),
ExprKind::Await(expr, await_kw_span) => {
vis.visit_expr(expr);
vis.visit_span(await_kw_span);
}
ExprKind::Assign(el, er, _) => {
vis.visit_expr(el);
vis.visit_expr(er);

@@ -388,7 +388,7 @@ pub fn contains_exterior_struct_lit(value: &ast::Expr) -> bool {
// X { y: 1 } + X { y: 2 }
contains_exterior_struct_lit(lhs) || contains_exterior_struct_lit(rhs)
}
ast::ExprKind::Await(x)
ast::ExprKind::Await(x, _)
| ast::ExprKind::Unary(_, x)
| ast::ExprKind::Cast(x, _)
| ast::ExprKind::Type(x, _)

@@ -864,7 +864,7 @@ pub fn walk_expr<'a, V: Visitor<'a>>(visitor: &mut V, expression: &'a Expr) {
ExprKind::Async(_, body) => {
visitor.visit_block(body);
}
ExprKind::Await(expr) => visitor.visit_expr(expr),
ExprKind::Await(expr, _) => visitor.visit_expr(expr),
ExprKind::Assign(lhs, rhs, _) => {
visitor.visit_expr(lhs);
visitor.visit_expr(rhs);

@@ -108,7 +108,7 @@ pub struct BaseExpressionDoubleDot {
pub struct AwaitOnlyInAsyncFnAndBlocks {
#[primary_span]
#[label]
pub dot_await_span: Span,
pub await_kw_span: Span,
#[label(ast_lowering_this_not_async)]
pub item_span: Option<Span>,
}

@@ -185,21 +185,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
hir::AsyncGeneratorKind::Block,
|this| this.with_new_scopes(|this| this.lower_block_expr(block)),
),
ExprKind::Await(expr) => {
let dot_await_span = if expr.span.hi() < e.span.hi() {
let span_with_whitespace = self
.tcx
.sess
.source_map()
.span_extend_while(expr.span, char::is_whitespace)
.unwrap_or(expr.span);
span_with_whitespace.shrink_to_hi().with_hi(e.span.hi())
} else {
// this is a recovered `await expr`
e.span
};
self.lower_expr_await(dot_await_span, expr)
}
ExprKind::Await(expr, await_kw_span) => self.lower_expr_await(*await_kw_span, expr),
ExprKind::Closure(box Closure {
binder,
capture_clause,

@@ -710,18 +696,18 @@ impl<'hir> LoweringContext<'_, 'hir> {
/// }
/// }
/// ```
fn lower_expr_await(&mut self, dot_await_span: Span, expr: &Expr) -> hir::ExprKind<'hir> {
let full_span = expr.span.to(dot_await_span);
fn lower_expr_await(&mut self, await_kw_span: Span, expr: &Expr) -> hir::ExprKind<'hir> {
let full_span = expr.span.to(await_kw_span);
match self.generator_kind {
Some(hir::GeneratorKind::Async(_)) => {}
Some(hir::GeneratorKind::Gen) | None => {
self.tcx.sess.emit_err(AwaitOnlyInAsyncFnAndBlocks {
dot_await_span,
await_kw_span,
item_span: self.current_item,
});
}
}
let span = self.mark_span_with_reason(DesugaringKind::Await, dot_await_span, None);
let span = self.mark_span_with_reason(DesugaringKind::Await, await_kw_span, None);
let gen_future_span = self.mark_span_with_reason(
DesugaringKind::Await,
full_span,

@@ -583,7 +583,7 @@ fn may_contain_yield_point(e: &ast::Expr) -> bool {

impl Visitor<'_> for MayContainYieldPoint {
fn visit_expr(&mut self, e: &ast::Expr) {
if let ast::ExprKind::Await(_) | ast::ExprKind::Yield(_) = e.kind {
if let ast::ExprKind::Await(_, _) | ast::ExprKind::Yield(_) = e.kind {
self.0 = true;
} else {
visit::walk_expr(self, e);

@@ -447,7 +447,7 @@ impl<'a> State<'a> {
self.ibox(0);
self.print_block_with_attrs(blk, attrs);
}
ast::ExprKind::Await(expr) => {
ast::ExprKind::Await(expr, _) => {
self.print_expr_maybe_paren(expr, parser::PREC_POSTFIX);
self.word(".await");
}

@@ -566,7 +566,7 @@ impl<'a> State<'a> {
self.print_ident(field);
}
}

self.pclose();
self.end();
}
ast::ExprKind::MacCall(m) => self.print_mac(m),

@@ -203,6 +203,15 @@ borrowck_moved_due_to_method_call =
*[false] call
}

borrowck_moved_due_to_await =
{$place_name} {$is_partial ->
[true] partially moved
*[false] moved
} due to this {$is_loop_message ->
[true] await, in previous iteration of loop
*[false] await
}

borrowck_value_moved_here =
value {$is_partial ->
[true] partially moved

@@ -1085,12 +1085,21 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
}
}
} else {
err.subdiagnostic(CaptureReasonLabel::MethodCall {
fn_call_span,
place_name: &place_name,
is_partial,
is_loop_message,
});
if let Some((CallDesugaringKind::Await, _)) = desugaring {
err.subdiagnostic(CaptureReasonLabel::Await {
fn_call_span,
place_name: &place_name,
is_partial,
is_loop_message,
});
} else {
err.subdiagnostic(CaptureReasonLabel::MethodCall {
fn_call_span,
place_name: &place_name,
is_partial,
is_loop_message,
});
}
// Erase and shadow everything that could be passed to the new infcx.
let ty = moved_place.ty(self.body, tcx).ty;

@@ -338,6 +338,14 @@ pub(crate) enum CaptureReasonLabel<'a> {
is_partial: bool,
is_loop_message: bool,
},
#[label(borrowck_moved_due_to_await)]
Await {
#[primary_span]
fn_call_span: Span,
place_name: &'a str,
is_partial: bool,
is_loop_message: bool,
},
#[label(borrowck_value_moved_here)]
MovedHere {
#[primary_span]

@@ -169,5 +169,40 @@ builtin_macros_asm_pure_no_output = asm with the `pure` option must have at leas

builtin_macros_asm_modifier_invalid = asm template modifier must be a single character

builtin_macros_asm_requires_template = requires at least a template string argument

builtin_macros_asm_expected_comma = expected token: `,`
.label = expected `,`

builtin_macros_asm_underscore_input = _ cannot be used for input operands

builtin_macros_asm_sym_no_path = expected a path for argument to `sym`

builtin_macros_asm_expected_other = expected operand, {$is_global_asm ->
[true] options
*[false] clobber_abi, options
}, or additional template string

builtin_macros_asm_duplicate_arg = duplicate argument named `{$name}`
.label = previously here
.arg = duplicate argument

builtin_macros_asm_pos_after = positional arguments cannot follow named arguments or explicit register arguments
.pos = positional argument
.named = named argument
.explicit = explicit register argument

builtin_macros_asm_noreturn = asm outputs are not allowed with the `noreturn` option

builtin_macros_global_asm_clobber_abi = `clobber_abi` cannot be used with `global_asm!`

builtin_macros_asm_clobber_no_reg = asm with `clobber_abi` must specify explicit registers for outputs
builtin_macros_asm_clobber_abi = clobber_abi
builtin_macros_asm_clobber_outputs = generic outputs

builtin_macros_asm_opt_already_provided = the `{$symbol}` option was already provided
.label = this option was already provided
.suggestion = remove this option

builtin_macros_test_runner_invalid = `test_runner` argument must be a path
builtin_macros_test_runner_nargs = `#![test_runner(..)]` accepts exactly 1 argument

@@ -3,7 +3,7 @@ use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter};
use rustc_ast::tokenstream::TokenStream;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_errors::{Applicability, PResult};
use rustc_errors::PResult;
use rustc_expand::base::{self, *};
use rustc_parse::parser::Parser;
use rustc_parse_format as parse;

@@ -49,7 +49,7 @@ pub fn parse_asm_args<'a>(
let diag = &sess.span_diagnostic;

if p.token == token::Eof {
return Err(diag.struct_span_err(sp, "requires at least a template string argument"));
return Err(diag.create_err(errors::AsmRequiresTemplate { span: sp }));
}

let first_template = p.parse_expr()?;

@@ -68,8 +68,7 @@ pub fn parse_asm_args<'a>(
if !p.eat(&token::Comma) {
if allow_templates {
// After a template string, we always expect *only* a comma...
let mut err = diag.struct_span_err(p.token.span, "expected token: `,`");
err.span_label(p.token.span, "expected `,`");
let mut err = diag.create_err(errors::AsmExpectedComma { span: p.token.span });
p.maybe_annotate_with_ascription(&mut err, false);
return Err(err);
} else {

@@ -112,7 +111,7 @@ pub fn parse_asm_args<'a>(
let op = if !is_global_asm && p.eat_keyword(kw::In) {
let reg = parse_reg(p, &mut explicit_reg)?;
if p.eat_keyword(kw::Underscore) {
let err = diag.struct_span_err(p.token.span, "_ cannot be used for input operands");
let err = diag.create_err(errors::AsmUnderscoreInput { span: p.token.span });
return Err(err);
}
let expr = p.parse_expr()?;

@@ -128,7 +127,7 @@ pub fn parse_asm_args<'a>(
} else if !is_global_asm && p.eat_keyword(sym::inout) {
let reg = parse_reg(p, &mut explicit_reg)?;
if p.eat_keyword(kw::Underscore) {
let err = diag.struct_span_err(p.token.span, "_ cannot be used for input operands");
let err = diag.create_err(errors::AsmUnderscoreInput { span: p.token.span });
return Err(err);
}
let expr = p.parse_expr()?;

@@ -142,7 +141,7 @@ pub fn parse_asm_args<'a>(
} else if !is_global_asm && p.eat_keyword(sym::inlateout) {
let reg = parse_reg(p, &mut explicit_reg)?;
if p.eat_keyword(kw::Underscore) {
let err = diag.struct_span_err(p.token.span, "_ cannot be used for input operands");
let err = diag.create_err(errors::AsmUnderscoreInput { span: p.token.span });
return Err(err);
}
let expr = p.parse_expr()?;

@@ -160,7 +159,7 @@ pub fn parse_asm_args<'a>(
let expr = p.parse_expr()?;
let ast::ExprKind::Path(qself, path) = &expr.kind else {
let err = diag
.struct_span_err(expr.span, "expected a path for argument to `sym`");
.create_err(errors::AsmSymNoPath { span: expr.span });
return Err(err);
};
let sym = ast::InlineAsmSym {

@@ -181,13 +180,10 @@ pub fn parse_asm_args<'a>(
) => {}
ast::ExprKind::MacCall(..) => {}
_ => {
let errstr = if is_global_asm {
"expected operand, options, or additional template string"
} else {
"expected operand, clobber_abi, options, or additional template string"
};
let mut err = diag.struct_span_err(template.span, errstr);
err.span_label(template.span, errstr);
let err = diag.create_err(errors::AsmExpectedOther {
span: template.span,
is_global_asm,
});
return Err(err);
}
}

@@ -212,28 +208,16 @@ pub fn parse_asm_args<'a>(
args.reg_args.insert(slot);
} else if let Some(name) = name {
if let Some(&prev) = args.named_args.get(&name) {
diag.struct_span_err(span, &format!("duplicate argument named `{}`", name))
.span_label(args.operands[prev].1, "previously here")
.span_label(span, "duplicate argument")
.emit();
diag.emit_err(errors::AsmDuplicateArg { span, name, prev: args.operands[prev].1 });
continue;
}
args.named_args.insert(name, slot);
} else {
if !args.named_args.is_empty() || !args.reg_args.is_empty() {
let mut err = diag.struct_span_err(
span,
"positional arguments cannot follow named arguments \
or explicit register arguments",
);
err.span_label(span, "positional argument");
for pos in args.named_args.values() {
err.span_label(args.operands[*pos].1, "named argument");
}
for pos in &args.reg_args {
err.span_label(args.operands[*pos].1, "explicit register argument");
}
err.emit();
let named = args.named_args.values().map(|p| args.operands[*p].1).collect();
let explicit = args.reg_args.iter().map(|p| args.operands[*p].1).collect();

diag.emit_err(errors::AsmPositionalAfter { span, named, explicit });
}
}
}

@@ -284,34 +268,25 @@ pub fn parse_asm_args<'a>(
diag.emit_err(errors::AsmPureNoOutput { spans: args.options_spans.clone() });
}
if args.options.contains(ast::InlineAsmOptions::NORETURN) && !outputs_sp.is_empty() {
let err = diag
.struct_span_err(outputs_sp, "asm outputs are not allowed with the `noreturn` option");

let err = diag.create_err(errors::AsmNoReturn { outputs_sp });
// Bail out now since this is likely to confuse MIR
return Err(err);
}

if args.clobber_abis.len() > 0 {
if is_global_asm {
let err = diag.struct_span_err(
args.clobber_abis.iter().map(|(_, span)| *span).collect::<Vec<Span>>(),
"`clobber_abi` cannot be used with `global_asm!`",
);
let err = diag.create_err(errors::GlobalAsmClobberAbi {
spans: args.clobber_abis.iter().map(|(_, span)| *span).collect(),
});

// Bail out now since this is likely to confuse later stages
return Err(err);
}
if !regclass_outputs.is_empty() {
diag.struct_span_err(
regclass_outputs.clone(),
"asm with `clobber_abi` must specify explicit registers for outputs",
)
.span_labels(
args.clobber_abis.iter().map(|(_, span)| *span).collect::<Vec<Span>>(),
"clobber_abi",
)
.span_labels(regclass_outputs, "generic outputs")
.emit();
diag.emit_err(errors::AsmClobberNoReg {
spans: regclass_outputs,
clobbers: args.clobber_abis.iter().map(|(_, span)| *span).collect(),
});
}
}

@@ -323,25 +298,9 @@ pub fn parse_asm_args<'a>(
/// This function must be called immediately after the option token is parsed.
/// Otherwise, the suggestion will be incorrect.
fn err_duplicate_option(p: &mut Parser<'_>, symbol: Symbol, span: Span) {
let mut err = p
.sess
.span_diagnostic
.struct_span_err(span, &format!("the `{}` option was already provided", symbol));
err.span_label(span, "this option was already provided");

// Tool-only output
let mut full_span = span;
if p.token.kind == token::Comma {
full_span = full_span.to(p.token.span);
}
err.tool_only_span_suggestion(
full_span,
"remove this option",
"",
Applicability::MachineApplicable,
);

err.emit();
let full_span = if p.token.kind == token::Comma { span.to(p.token.span) } else { span };
p.sess.span_diagnostic.emit_err(errors::AsmOptAlreadyprovided { span, symbol, full_span });
}

/// Try to set the provided option in the provided `AsmArgs`.

@@ -288,7 +288,7 @@ impl<'cx, 'a> Context<'cx, 'a> {
ExprKind::Assign(_, _, _)
| ExprKind::AssignOp(_, _, _)
| ExprKind::Async(_, _)
| ExprKind::Await(_)
| ExprKind::Await(_, _)
| ExprKind::Block(_, _)
| ExprKind::Break(_, _)
| ExprKind::Closure(_)

@@ -1,5 +1,6 @@
use rustc_errors::{
AddToDiagnostic, EmissionGuarantee, IntoDiagnostic, MultiSpan, SingleLabelManySpans,
AddToDiagnostic, DiagnosticBuilder, EmissionGuarantee, Handler, IntoDiagnostic, MultiSpan,
SingleLabelManySpans,
};
use rustc_macros::{Diagnostic, Subdiagnostic};
use rustc_span::{symbol::Ident, Span, Symbol};

@@ -370,11 +371,12 @@ pub(crate) struct EnvNotDefined {
// Hand-written implementation to support custom user messages
impl<'a, G: EmissionGuarantee> IntoDiagnostic<'a, G> for EnvNotDefined {
#[track_caller]
fn into_diagnostic(
self,
handler: &'a rustc_errors::Handler,
) -> rustc_errors::DiagnosticBuilder<'a, G> {
fn into_diagnostic(self, handler: &'a Handler) -> DiagnosticBuilder<'a, G> {
let mut diag = if let Some(msg) = self.msg {
#[expect(
rustc::untranslatable_diagnostic,
reason = "cannot translate user-provided messages"
)]
handler.struct_diagnostic(msg.as_str())
} else {
handler.struct_diagnostic(crate::fluent_generated::builtin_macros_env_not_defined)

@@ -606,6 +608,117 @@ pub(crate) struct AsmModifierInvalid {
pub(crate) span: Span,
}

#[derive(Diagnostic)]
#[diag(builtin_macros_asm_requires_template)]
pub(crate) struct AsmRequiresTemplate {
#[primary_span]
pub(crate) span: Span,
}

#[derive(Diagnostic)]
#[diag(builtin_macros_asm_expected_comma)]
pub(crate) struct AsmExpectedComma {
#[primary_span]
#[label]
pub(crate) span: Span,
}

#[derive(Diagnostic)]
#[diag(builtin_macros_asm_underscore_input)]
pub(crate) struct AsmUnderscoreInput {
#[primary_span]
pub(crate) span: Span,
}

#[derive(Diagnostic)]
#[diag(builtin_macros_asm_sym_no_path)]
pub(crate) struct AsmSymNoPath {
#[primary_span]
pub(crate) span: Span,
}

#[derive(Diagnostic)]
#[diag(builtin_macros_asm_expected_other)]
pub(crate) struct AsmExpectedOther {
#[primary_span]
#[label(builtin_macros_asm_expected_other)]
pub(crate) span: Span,
pub(crate) is_global_asm: bool,
}

#[derive(Diagnostic)]
#[diag(builtin_macros_asm_duplicate_arg)]
pub(crate) struct AsmDuplicateArg {
#[primary_span]
#[label(builtin_macros_arg)]
pub(crate) span: Span,
#[label]
pub(crate) prev: Span,
pub(crate) name: Symbol,
}

#[derive(Diagnostic)]
#[diag(builtin_macros_asm_pos_after)]
pub(crate) struct AsmPositionalAfter {
#[primary_span]
#[label(builtin_macros_pos)]
pub(crate) span: Span,
#[label(builtin_macros_named)]
pub(crate) named: Vec<Span>,
#[label(builtin_macros_explicit)]
pub(crate) explicit: Vec<Span>,
}

#[derive(Diagnostic)]
#[diag(builtin_macros_asm_noreturn)]
pub(crate) struct AsmNoReturn {
#[primary_span]
pub(crate) outputs_sp: Vec<Span>,
}

#[derive(Diagnostic)]
#[diag(builtin_macros_global_asm_clobber_abi)]
pub(crate) struct GlobalAsmClobberAbi {
#[primary_span]
pub(crate) spans: Vec<Span>,
}

pub(crate) struct AsmClobberNoReg {
pub(crate) spans: Vec<Span>,
pub(crate) clobbers: Vec<Span>,
}

impl<'a, G: EmissionGuarantee> IntoDiagnostic<'a, G> for AsmClobberNoReg {
fn into_diagnostic(self, handler: &'a Handler) -> DiagnosticBuilder<'a, G> {
let mut diag =
handler.struct_diagnostic(crate::fluent_generated::builtin_macros_asm_clobber_no_reg);
diag.set_span(self.spans.clone());
// eager translation as `span_labels` takes `AsRef<str>`
let lbl1 = handler.eagerly_translate_to_string(
crate::fluent_generated::builtin_macros_asm_clobber_abi,
[].into_iter(),
);
diag.span_labels(self.clobbers, &lbl1);
let lbl2 = handler.eagerly_translate_to_string(
crate::fluent_generated::builtin_macros_asm_clobber_outputs,
[].into_iter(),
);
diag.span_labels(self.spans, &lbl2);
diag
}
}

#[derive(Diagnostic)]
#[diag(builtin_macros_asm_opt_already_provided)]
pub(crate) struct AsmOptAlreadyprovided {
#[primary_span]
#[label]
pub(crate) span: Span,
pub(crate) symbol: Symbol,
#[suggestion(code = "", applicability = "machine-applicable", style = "tool-only")]
pub(crate) full_span: Span,
}

#[derive(Diagnostic)]
#[diag(builtin_macros_test_runner_invalid)]
pub(crate) struct TestRunnerInvalid {

@@ -232,7 +232,7 @@ fn generate_test_harness(
let expn_id = ext_cx.resolver.expansion_for_ast_pass(
DUMMY_SP,
AstPass::TestHarness,
&[sym::test, sym::rustc_attrs],
&[sym::test, sym::rustc_attrs, sym::no_coverage],
None,
);
let def_site = DUMMY_SP.with_def_site_ctxt(expn_id.to_expn_id());

@@ -313,6 +313,8 @@ fn mk_main(cx: &mut TestCtxt<'_>) -> P<ast::Item> {

// #[rustc_main]
let main_attr = ecx.attr_word(sym::rustc_main, sp);
// #[no_coverage]
let no_coverage_attr = ecx.attr_word(sym::no_coverage, sp);

// pub fn main() { ... }
let main_ret_ty = ecx.ty(sp, ast::TyKind::Tup(ThinVec::new()));

@@ -342,7 +344,7 @@ fn mk_main(cx: &mut TestCtxt<'_>) -> P<ast::Item> {

let main = P(ast::Item {
ident: main_id,
attrs: thin_vec![main_attr],
attrs: thin_vec![main_attr, no_coverage_attr],
id: ast::DUMMY_NODE_ID,
kind: main,
vis: ast::Visibility { span: sp, kind: ast::VisibilityKind::Public, tokens: None },

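Note (not part of the diff): the harness now tags its synthesized entry point with `#[no_coverage]`. A hedged, user-level sketch of what that attribute does on a nightly of this era (the gate name `no_coverage` is assumed from the symbols above; this is an illustration, not the generated harness code):

#![feature(no_coverage)]

// Functions marked this way are skipped by `-C instrument-coverage`,
// which is why the auto-generated test `main` receives the attribute.
#[no_coverage]
fn main() {
    println!("this function is not instrumented for coverage");
}
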
@@ -49,3 +49,7 @@ libc = "0.2.50"
version = "0.30.1"
default-features = false
features = ["read_core", "elf", "macho", "pe", "unaligned", "archive", "write"]

[target.'cfg(windows)'.dependencies.windows]
version = "0.46.0"
features = ["Win32_Globalization"]

@@ -860,7 +860,7 @@ fn link_natively<'a>(
if !prog.status.success() {
let mut output = prog.stderr.clone();
output.extend_from_slice(&prog.stdout);
let escaped_output = escape_string(&output);
let escaped_output = escape_linker_output(&output, flavor);
// FIXME: Add UI tests for this error.
let err = errors::LinkingFailed {
linker_path: &linker_path,

@@ -1052,6 +1052,83 @@ fn escape_string(s: &[u8]) -> String {
}
}

#[cfg(not(windows))]
fn escape_linker_output(s: &[u8], _flavour: LinkerFlavor) -> String {
escape_string(s)
}

/// If the output of the msvc linker is not UTF-8 and the host is Windows,
/// then try to convert the string from the OEM encoding.
#[cfg(windows)]
fn escape_linker_output(s: &[u8], flavour: LinkerFlavor) -> String {
// This only applies to the actual MSVC linker.
if flavour != LinkerFlavor::Msvc(Lld::No) {
return escape_string(s);
}
match str::from_utf8(s) {
Ok(s) => return s.to_owned(),
Err(_) => match win::locale_byte_str_to_string(s, win::oem_code_page()) {
Some(s) => s,
// The string is not UTF-8 and isn't valid for the OEM code page
None => format!("Non-UTF-8 output: {}", s.escape_ascii()),
},
}
}

/// Wrappers around the Windows API.
#[cfg(windows)]
mod win {
use windows::Win32::Globalization::{
GetLocaleInfoEx, MultiByteToWideChar, CP_OEMCP, LOCALE_IUSEUTF8LEGACYOEMCP,
LOCALE_NAME_SYSTEM_DEFAULT, LOCALE_RETURN_NUMBER, MB_ERR_INVALID_CHARS,
};

/// Get the Windows system OEM code page. This is most notably the code page
/// used for link.exe's output.
pub fn oem_code_page() -> u32 {
unsafe {
let mut cp: u32 = 0;
// We're using the `LOCALE_RETURN_NUMBER` flag to return a u32.
// But the API requires us to pass the data as though it's a [u16] string.
let len = std::mem::size_of::<u32>() / std::mem::size_of::<u16>();
let data = std::slice::from_raw_parts_mut(&mut cp as *mut u32 as *mut u16, len);
let len_written = GetLocaleInfoEx(
LOCALE_NAME_SYSTEM_DEFAULT,
LOCALE_IUSEUTF8LEGACYOEMCP | LOCALE_RETURN_NUMBER,
Some(data),
);
if len_written as usize == len { cp } else { CP_OEMCP }
}
}
/// Try to convert a multi-byte string to a UTF-8 string using the given code page
/// The string does not need to be null terminated.
///
/// This is implemented as a wrapper around `MultiByteToWideChar`.
/// See <https://learn.microsoft.com/en-us/windows/win32/api/stringapiset/nf-stringapiset-multibytetowidechar>
///
/// It will fail if the multi-byte string is longer than `i32::MAX` or if it contains
/// any invalid bytes for the expected encoding.
pub fn locale_byte_str_to_string(s: &[u8], code_page: u32) -> Option<String> {
// `MultiByteToWideChar` requires a length to be a "positive integer".
if s.len() > isize::MAX as usize {
return None;
}
// Error if the string is not valid for the expected code page.
let flags = MB_ERR_INVALID_CHARS;
// Call MultiByteToWideChar twice.
// First to calculate the length then to convert the string.
let mut len = unsafe { MultiByteToWideChar(code_page, flags, s, None) };
if len > 0 {
let mut utf16 = vec![0; len as usize];
len = unsafe { MultiByteToWideChar(code_page, flags, s, Some(&mut utf16)) };
if len > 0 {
return utf16.get(..len as usize).map(String::from_utf16_lossy);
}
}
None
}
}

fn add_sanitizer_libraries(sess: &Session, crate_type: CrateType, linker: &mut dyn Linker) {
// On macOS the runtimes are distributed as dylibs which should be linked to
// both executables and dynamic shared objects. Everywhere else the runtimes

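Note (not part of the diff): a standalone sketch of the pre-existing non-Windows fallback that `escape_linker_output` delegates to. Names and behavior are simplified from the `escape_string` named in the hunk header; only the lossy-escape idea is illustrated:

fn escape_string(s: &[u8]) -> String {
    match std::str::from_utf8(s) {
        // Valid UTF-8 linker output passes through unchanged.
        Ok(s) => s.to_owned(),
        // Otherwise fall back to an ASCII-escaped rendering instead of failing.
        Err(_) => format!("Non-UTF-8 output: {}", s.escape_ascii()),
    }
}

fn main() {
    assert_eq!(escape_string(b"link ok"), "link ok");
    println!("{}", escape_string(&[0xff, 0xfe, b'!']));
}
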
@@ -184,6 +184,9 @@ impl<'tcx> NonConstOp<'tcx> for FnCallNonConst<'tcx> {
CallDesugaringKind::TryBlockFromOutput => {
error!("`try` block cannot convert `{}` to the result in {}s")
}
CallDesugaringKind::Await => {
error!("cannot convert `{}` into a future in {}s")
}
};

diag_trait(&mut err, self_ty, kind.trait_def_id(tcx));

@@ -310,8 +310,8 @@ declare_features! (
/// Allows `async || body` closures.
(active, async_closure, "1.37.0", Some(62290), None),
/// Allows async functions to be declared, implemented, and used in traits.
(incomplete, async_fn_in_trait, "1.66.0", Some(91611), None),
/// Allows `extern "C-unwind" fn` to enable unwinding across ABI boundaries.
(active, async_fn_in_trait, "1.66.0", Some(91611), None),
/// Treat `extern "C"` function as nounwind.
(active, c_unwind, "1.52.0", Some(74990), None),
/// Allows using C-variadics.
(active, c_variadic, "1.34.0", Some(44930), None),

@@ -496,7 +496,7 @@ declare_features! (
/// Allows `repr(simd)` and importing the various simd intrinsics.
(active, repr_simd, "1.4.0", Some(27731), None),
/// Allows return-position `impl Trait` in traits.
(incomplete, return_position_impl_trait_in_trait, "1.65.0", Some(91611), None),
(active, return_position_impl_trait_in_trait, "1.65.0", Some(91611), None),
/// Allows bounding the return type of AFIT/RPITIT.
(incomplete, return_type_notation, "1.70.0", Some(109417), None),
/// Allows `extern "rust-cold"`.

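Note (not part of the diff): `async_fn_in_trait` moves here from `incomplete` to `active`. A hedged sketch of the nightly-only surface syntax that gate controls on a toolchain of this era (illustrative user code, not compiler source):

#![feature(async_fn_in_trait)]
#![allow(incomplete_features)] // harmless; only relevant on older nightlies where the gate was still incomplete

trait Fetch {
    // An `async fn` written directly in a trait definition.
    async fn fetch(&self) -> u32;
}

struct Constant(u32);

impl Fetch for Constant {
    async fn fetch(&self) -> u32 {
        self.0
    }
}

fn main() {
    // Constructing the returned future is enough for this sketch;
    // driving it to completion would need an async executor.
    let _fut = Constant(7).fetch();
}
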
@@ -344,7 +344,7 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
),
ungated!(link_name, Normal, template!(NameValueStr: "name"), FutureWarnPreceding),
ungated!(no_link, Normal, template!(Word), WarnFollowing),
ungated!(repr, Normal, template!(List: "C"), DuplicatesOk),
ungated!(repr, Normal, template!(List: "C"), DuplicatesOk, @only_local: true),
ungated!(export_name, Normal, template!(NameValueStr: "name"), FutureWarnPreceding),
ungated!(link_section, Normal, template!(NameValueStr: "name"), FutureWarnPreceding),
ungated!(no_mangle, Normal, template!(Word), WarnFollowing, @only_local: true),

@@ -381,6 +381,7 @@ pub fn check_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem<'_>) {
sym::unlikely => (0, vec![tcx.types.bool], tcx.types.bool),

sym::read_via_copy => (1, vec![tcx.mk_imm_ptr(param(0))], param(0)),
sym::write_via_move => (1, vec![tcx.mk_mut_ptr(param(0)), param(0)], tcx.mk_unit()),

sym::discriminant_value => {
let assoc_items = tcx.associated_item_def_ids(

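Note (not part of the diff): the two intrinsic signatures registered above correspond, at the library level, to `std::ptr::read` and `std::ptr::write`; that mapping is stated here as an assumption for illustration. A runnable sketch using the stable wrappers rather than the intrinsics themselves:

fn main() {
    let src = 7u32;
    // read_via_copy-shaped: one type parameter, `*const T` in, `T` out.
    let copied = unsafe { std::ptr::read(&src as *const u32) };
    assert_eq!(copied, 7);

    let mut dst = 0u32;
    // write_via_move-shaped: `*mut T` and a `T` in, unit out.
    unsafe { std::ptr::write(&mut dst as *mut u32, copied) };
    assert_eq!(dst, 7);
}
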
@@ -4,7 +4,7 @@ use crate::errors;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::memmap::Mmap;
use rustc_middle::dep_graph::{SerializedDepGraph, WorkProduct, WorkProductId};
use rustc_middle::ty::OnDiskCache;
use rustc_middle::query::on_disk_cache::OnDiskCache;
use rustc_serialize::opaque::MemDecoder;
use rustc_serialize::Decodable;
use rustc_session::config::IncrementalStateAssertion;

@@ -211,7 +211,7 @@ pub fn load_dep_graph(sess: &Session) -> DepGraphFuture {
/// If we are not in incremental compilation mode, returns `None`.
/// Otherwise, tries to load the query result cache from disk,
/// creating an empty cache if it could not be loaded.
pub fn load_query_result_cache<'a, C: OnDiskCache<'a>>(sess: &'a Session) -> Option<C> {
pub fn load_query_result_cache(sess: &Session) -> Option<OnDiskCache<'_>> {
if sess.opts.incremental.is_none() {
return None;
}

@@ -223,7 +223,9 @@ pub fn load_query_result_cache<'a, C: OnDiskCache<'a>>(sess: &'a Session) -> Opt
&query_cache_path(sess),
sess.is_nightly_build(),
) {
LoadResult::Ok { data: (bytes, start_pos) } => Some(C::new(sess, bytes, start_pos)),
_ => Some(C::new_empty(sess.source_map())),
LoadResult::Ok { data: (bytes, start_pos) } => {
Some(OnDiskCache::new(sess, bytes, start_pos))
}
_ => Some(OnDiskCache::new_empty(sess.source_map())),
}
}

@@ -48,7 +48,7 @@ pub fn save_dep_graph(tcx: TyCtxt<'_>) {
move || {
sess.time("incr_comp_persist_result_cache", || {
// Drop the memory map so that we can remove the file and write to it.
if let Some(odc) = &tcx.on_disk_cache {
if let Some(odc) = &tcx.query_system.on_disk_cache {
odc.drop_serialized_data(tcx);
}

@@ -44,6 +44,7 @@ rustc_lint = { path = "../rustc_lint" }
rustc_errors = { path = "../rustc_errors" }
rustc_plugin_impl = { path = "../rustc_plugin_impl" }
rustc_privacy = { path = "../rustc_privacy" }
rustc_query_system = { path = "../rustc_query_system" }
rustc_query_impl = { path = "../rustc_query_impl" }
rustc_resolve = { path = "../rustc_resolve" }
rustc_target = { path = "../rustc_target" }

@@ -12,6 +12,7 @@ use rustc_lint::LintStore;
use rustc_middle::ty;
use rustc_parse::maybe_new_parser_from_source_str;
use rustc_query_impl::QueryCtxt;
use rustc_query_system::query::print_query_stack;
use rustc_session::config::{self, CheckCfg, ErrorOutputType, Input, OutputFilenames};
use rustc_session::lint;
use rustc_session::parse::{CrateConfig, ParseSess};

@@ -317,7 +318,7 @@ pub fn try_print_query_stack(handler: &Handler, num_frames: Option<usize>) {
// state if it was responsible for triggering the panic.
let i = ty::tls::with_context_opt(|icx| {
if let Some(icx) = icx {
QueryCtxt::from_tcx(icx.tcx).try_print_query_stack(icx.query, handler, num_frames)
print_query_stack(QueryCtxt { tcx: icx.tcx }, icx.query, handler, num_frames)
} else {
0
}

@@ -23,7 +23,6 @@ use rustc_mir_build as mir_build;
use rustc_parse::{parse_crate_from_file, parse_crate_from_source_str, validate_attr};
use rustc_passes::{self, hir_stats, layout_test};
use rustc_plugin_impl as plugin;
use rustc_query_impl::{OnDiskCache, Queries as TcxQueries};
use rustc_resolve::Resolver;
use rustc_session::config::{CrateType, Input, OutputFilenames, OutputType};
use rustc_session::cstore::{MetadataLoader, Untracked};

@@ -669,7 +668,6 @@ pub fn create_global_ctxt<'tcx>(
lint_store: Lrc<LintStore>,
dep_graph: DepGraph,
untracked: Untracked,
queries: &'tcx OnceCell<TcxQueries<'tcx>>,
gcx_cell: &'tcx OnceCell<GlobalCtxt<'tcx>>,
arena: &'tcx WorkerLocal<Arena<'tcx>>,
hir_arena: &'tcx WorkerLocal<rustc_hir::Arena<'tcx>>,

@@ -693,10 +691,6 @@ pub fn create_global_ctxt<'tcx>(
callback(sess, &mut local_providers, &mut extern_providers);
}

let queries = queries.get_or_init(|| {
TcxQueries::new(local_providers, extern_providers, query_result_on_disk_cache)
});

sess.time("setup_global_ctxt", || {
gcx_cell.get_or_init(move || {
TyCtxt::create_global_ctxt(

@@ -706,9 +700,9 @@ pub fn create_global_ctxt<'tcx>(
hir_arena,
untracked,
dep_graph,
queries.on_disk_cache.as_ref().map(OnDiskCache::as_dyn),
queries.as_dyn(),
query_result_on_disk_cache,
rustc_query_impl::query_callbacks(arena),
rustc_query_impl::query_system_fns(local_providers, extern_providers),
)
})
})

@@ -16,7 +16,6 @@ use rustc_metadata::creader::CStore;
use rustc_middle::arena::Arena;
use rustc_middle::dep_graph::DepGraph;
use rustc_middle::ty::{GlobalCtxt, TyCtxt};
use rustc_query_impl::Queries as TcxQueries;
use rustc_session::config::{self, OutputFilenames, OutputType};
use rustc_session::cstore::Untracked;
use rustc_session::{output::find_crate_name, Session};

@@ -81,7 +80,6 @@ impl<T> Default for Query<T> {
pub struct Queries<'tcx> {
compiler: &'tcx Compiler,
gcx_cell: OnceCell<GlobalCtxt<'tcx>>,
queries: OnceCell<TcxQueries<'tcx>>,

arena: WorkerLocal<Arena<'tcx>>,
hir_arena: WorkerLocal<rustc_hir::Arena<'tcx>>,

@@ -102,7 +100,6 @@ impl<'tcx> Queries<'tcx> {
Queries {
compiler,
gcx_cell: OnceCell::new(),
queries: OnceCell::new(),
arena: WorkerLocal::new(|_| Arena::default()),
hir_arena: WorkerLocal::new(|_| rustc_hir::Arena::default()),
dep_graph_future: Default::default(),

@@ -225,7 +222,6 @@ impl<'tcx> Queries<'tcx> {
lint_store,
self.dep_graph()?.steal(),
untracked,
&self.queries,
&self.gcx_cell,
&self.arena,
&self.hir_arena,

@@ -170,7 +170,8 @@ pub(crate) fn run_in_thread_pool_with_globals<F: FnOnce() -> R + Send, R: Send>(
) -> R {
use rustc_data_structures::jobserver;
use rustc_middle::ty::tls;
use rustc_query_impl::{deadlock, QueryContext, QueryCtxt};
use rustc_query_impl::QueryCtxt;
use rustc_query_system::query::{deadlock, QueryContext};

let registry = sync::Registry::new(threads);
let mut builder = rayon::ThreadPoolBuilder::new()

@@ -182,7 +183,7 @@ pub(crate) fn run_in_thread_pool_with_globals<F: FnOnce() -> R + Send, R: Send>(
// On deadlock, creates a new thread and forwards information in thread
// locals to it. The new thread runs the deadlock handler.
let query_map = tls::with(|tcx| {
QueryCtxt::from_tcx(tcx)
QueryCtxt::new(tcx)
.try_collect_active_jobs()
.expect("active jobs shouldn't be locked in deadlock handler")
});

@@ -4014,7 +4014,6 @@ declare_lint! {
/// ### Example
///
/// ```rust
/// #![feature(c_unwind)]
/// #![warn(ffi_unwind_calls)]
///
/// extern "C-unwind" {

@@ -4037,8 +4036,7 @@ declare_lint! {
/// that desire this ability it is therefore necessary to avoid such calls.
pub FFI_UNWIND_CALLS,
Allow,
"call to foreign functions or function pointers with FFI-unwind ABI",
@feature_gate = sym::c_unwind;
"call to foreign functions or function pointers with FFI-unwind ABI"
}

declare_lint! {

@@ -11,6 +11,7 @@ chalk-ir = "0.87.0"
derive_more = "0.99.17"
either = "1.5.0"
gsgdt = "0.1.2"
measureme = "10.0.0"
polonius-engine = "0.13.0"
rustc_apfloat = { path = "../rustc_apfloat" }
rustc_arena = { path = "../rustc_arena" }

@@ -227,7 +227,9 @@ pub fn specialized_encode_alloc_id<'tcx, E: TyEncoder<I = TyCtxt<'tcx>>>(
// References to statics doesn't need to know about their allocations,
// just about its `DefId`.
AllocDiscriminant::Static.encode(encoder);
did.encode(encoder);
// Cannot use `did.encode(encoder)` because of a bug around
// specializations and method calls.
Encodable::<E>::encode(&did, encoder);
}
}
}

@@ -9,6 +9,7 @@ use rustc_span::def_id::LOCAL_CRATE;

pub mod erase;
mod keys;
pub mod on_disk_cache;
pub use keys::{AsLocalKey, Key, LocalCrate};

// Each of these queries corresponds to a function pointer field in the

@@ -874,7 +875,7 @@ rustc_queries! {

query typeck(key: LocalDefId) -> &'tcx ty::TypeckResults<'tcx> {
desc { |tcx| "type-checking `{}`", tcx.def_path_str(key) }
cache_on_disk_if { true }
cache_on_disk_if(tcx) { !tcx.is_typeck_child(key.to_def_id()) }
}
query diagnostic_only_typeck(key: LocalDefId) -> &'tcx ty::TypeckResults<'tcx> {
desc { |tcx| "type-checking `{}`", tcx.def_path_str(key) }

@@ -1,4 +1,3 @@
use crate::QueryCtxt;
use rustc_data_structures::fx::{FxHashMap, FxIndexSet};
use rustc_data_structures::memmap::Mmap;
use rustc_data_structures::stable_hasher::Hash64;

@@ -13,8 +12,7 @@ use rustc_middle::mir::interpret::{AllocDecodingSession, AllocDecodingState};
use rustc_middle::mir::{self, interpret};
use rustc_middle::ty::codec::{RefDecodable, TyDecoder, TyEncoder};
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_query_system::dep_graph::DepContext;
use rustc_query_system::query::{QueryCache, QuerySideEffects};
use rustc_query_system::query::QuerySideEffects;
use rustc_serialize::{
opaque::{FileEncodeResult, FileEncoder, IntEncodedWithFixedSize, MemDecoder},
Decodable, Decoder, Encodable, Encoder,

@@ -123,10 +121,12 @@ struct SourceFileIndex(u32);
pub struct AbsoluteBytePos(u64);

impl AbsoluteBytePos {
fn new(pos: usize) -> AbsoluteBytePos {
#[inline]
pub fn new(pos: usize) -> AbsoluteBytePos {
AbsoluteBytePos(pos.try_into().expect("Incremental cache file size overflowed u64."))
}

#[inline]
fn to_usize(self) -> usize {
self.0 as usize
}

@@ -144,11 +144,13 @@ struct EncodedSourceFileId {
}

impl EncodedSourceFileId {
#[inline]
fn translate(&self, tcx: TyCtxt<'_>) -> StableSourceFileId {
let cnum = tcx.stable_crate_id_to_crate_num(self.stable_crate_id);
StableSourceFileId { file_name_hash: self.file_name_hash, cnum }
}

#[inline]
fn new(tcx: TyCtxt<'_>, file: &SourceFile) -> EncodedSourceFileId {
let source_file_id = StableSourceFileId::new(file);
EncodedSourceFileId {

@@ -158,9 +160,9 @@ impl EncodedSourceFileId {
}
}

impl<'sess> rustc_middle::ty::OnDiskCache<'sess> for OnDiskCache<'sess> {
impl<'sess> OnDiskCache<'sess> {
/// Creates a new `OnDiskCache` instance from the serialized data in `data`.
fn new(sess: &'sess Session, data: Mmap, start_pos: usize) -> Self {
pub fn new(sess: &'sess Session, data: Mmap, start_pos: usize) -> Self {
debug_assert!(sess.opts.incremental.is_some());

// Wrap in a scope so we can borrow `data`.

@@ -193,7 +195,7 @@ impl<'sess> rustc_middle::ty::OnDiskCache<'sess> for OnDiskCache<'sess> {
}
}

fn new_empty(source_map: &'sess SourceMap) -> Self {
pub fn new_empty(source_map: &'sess SourceMap) -> Self {
Self {
serialized_data: RwLock::new(None),
file_index_to_stable_id: Default::default(),

@@ -215,7 +217,7 @@ impl<'sess> rustc_middle::ty::OnDiskCache<'sess> for OnDiskCache<'sess> {
/// Cache promotions require invoking queries, which needs to read the serialized data.
/// In order to serialize the new on-disk cache, the former on-disk cache file needs to be
/// deleted, hence we won't be able to refer to its memmapped data.
fn drop_serialized_data(&self, tcx: TyCtxt<'_>) {
pub fn drop_serialized_data(&self, tcx: TyCtxt<'_>) {
// Load everything into memory so we can write it out to the on-disk
// cache. The vast majority of cacheable query results should already
// be in memory, so this should be a cheap operation.

@@ -227,7 +229,7 @@ impl<'sess> rustc_middle::ty::OnDiskCache<'sess> for OnDiskCache<'sess> {
*self.serialized_data.write() = None;
}

fn serialize(&self, tcx: TyCtxt<'_>, encoder: FileEncoder) -> FileEncodeResult {
pub fn serialize(&self, tcx: TyCtxt<'_>, encoder: FileEncoder) -> FileEncodeResult {
// Serializing the `DepGraph` should not modify it.
tcx.dep_graph.with_ignore(|| {
// Allocate `SourceFileIndex`es.

@@ -269,7 +271,7 @@ impl<'sess> rustc_middle::ty::OnDiskCache<'sess> for OnDiskCache<'sess> {
tcx.sess.time("encode_query_results", || {
let enc = &mut encoder;
let qri = &mut query_result_index;
QueryCtxt::from_tcx(tcx).encode_query_results(enc, qri);
(tcx.query_system.fns.encode_query_results)(tcx, enc, qri);
});

// Encode side effects.

@@ -358,12 +360,6 @@ impl<'sess> rustc_middle::ty::OnDiskCache<'sess> for OnDiskCache<'sess> {
encoder.finish()
})
}
}

impl<'sess> OnDiskCache<'sess> {
pub fn as_dyn(&self) -> &dyn rustc_middle::ty::OnDiskCache<'sess> {
self as _
}

/// Loads a `QuerySideEffects` created during the previous compilation session.
pub fn load_side_effects(

@@ -380,8 +376,6 @@ impl<'sess> OnDiskCache<'sess> {
/// Stores a `QuerySideEffects` emitted during the current compilation session.
/// Anything stored like this will be available via `load_side_effects` in
/// the next compilation session.
#[inline(never)]
#[cold]
pub fn store_side_effects(&self, dep_node_index: DepNodeIndex, side_effects: QuerySideEffects) {
let mut current_side_effects = self.current_side_effects.borrow_mut();
let prev = current_side_effects.insert(dep_node_index, side_effects);

@@ -389,6 +383,7 @@ impl<'sess> OnDiskCache<'sess> {
}

/// Return whether the cached query result can be decoded.
#[inline]
pub fn loadable_from_disk(&self, dep_node_index: SerializedDepNodeIndex) -> bool {
self.query_result_index.contains_key(&dep_node_index)
// with_decoder is infallible, so we can stop here

@@ -413,8 +408,6 @@ impl<'sess> OnDiskCache<'sess> {
/// Since many anonymous queries can share the same `DepNode`, we aggregate
/// them -- as opposed to regular queries where we assume that there is a
/// 1:1 relationship between query-key and `DepNode`.
#[inline(never)]
#[cold]
pub fn store_side_effects_for_anon_node(
&self,
dep_node_index: DepNodeIndex,

@@ -485,6 +478,7 @@ pub struct CacheDecoder<'a, 'tcx> {
}

impl<'a, 'tcx> CacheDecoder<'a, 'tcx> {
#[inline]
fn file_index_to_file(&self, index: SourceFileIndex) -> Lrc<SourceFile> {
let CacheDecoder {
tcx,

@@ -705,6 +699,7 @@ impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for Span {

// copy&paste impl from rustc_metadata
impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for Symbol {
#[inline]
fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Self {
let tag = d.read_u8();

@@ -733,6 +728,7 @@ impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for Symbol {
}

impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for CrateNum {
#[inline]
fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Self {
let stable_id = StableCrateId::decode(d);
let cnum = d.tcx.stable_crate_id_to_crate_num(stable_id);

@@ -754,6 +750,7 @@ impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for DefIndex {
// compilation sessions. We use the `DefPathHash`, which is stable across
// sessions, to map the old `DefId` to the new one.
impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for DefId {
#[inline]
fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Self {
// Load the `DefPathHash` which is was we encoded the `DefId` as.
let def_path_hash = DefPathHash::decode(d);

@@ -770,6 +767,7 @@ impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for DefId {
}

impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx UnordSet<LocalDefId> {
#[inline]
fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Self {
RefDecodable::decode(d)
}

@@ -778,6 +776,7 @@ impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx UnordSet<LocalDefId>
impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>>
for &'tcx FxHashMap<DefId, ty::EarlyBinder<Ty<'tcx>>>
{
#[inline]
fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Self {
RefDecodable::decode(d)
}

@@ -786,24 +785,28 @@ impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>>
impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>>
for &'tcx IndexVec<mir::Promoted, mir::Body<'tcx>>
{
#[inline]
fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Self {
RefDecodable::decode(d)
}
}

impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx [(ty::Predicate<'tcx>, Span)] {
#[inline]
fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Self {
RefDecodable::decode(d)
}
}

impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx [(ty::Clause<'tcx>, Span)] {
#[inline]
fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Self {
RefDecodable::decode(d)
}
}

impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx [rustc_ast::InlineAsmTemplatePiece] {
#[inline]
fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Self {
RefDecodable::decode(d)
}

@@ -812,6 +815,7 @@ impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx [rustc_ast::InlineAsm
macro_rules! impl_ref_decoder {
(<$tcx:tt> $($ty:ty,)*) => {
$(impl<'a, $tcx> Decodable<CacheDecoder<'a, $tcx>> for &$tcx [$ty] {
#[inline]
fn decode(d: &mut CacheDecoder<'a, $tcx>) -> Self {
RefDecodable::decode(d)
}

@@ -846,6 +850,7 @@ pub struct CacheEncoder<'a, 'tcx> {
}

impl<'a, 'tcx> CacheEncoder<'a, 'tcx> {
#[inline]
fn source_file_index(&mut self, source_file: Lrc<SourceFile>) -> SourceFileIndex {
self.file_to_file_index[&(&*source_file as *const SourceFile)]
}

@@ -855,7 +860,7 @@ impl<'a, 'tcx> CacheEncoder<'a, 'tcx> {
/// encode the specified tag, then the given value, then the number of
/// bytes taken up by tag and value. On decoding, we can then verify that
/// we get the expected tag and read the expected number of bytes.
fn encode_tagged<T: Encodable<Self>, V: Encodable<Self>>(&mut self, tag: T, value: &V) {
pub fn encode_tagged<T: Encodable<Self>, V: Encodable<Self>>(&mut self, tag: T, value: &V) {
let start_pos = self.position();

tag.encode(self);

@@ -865,6 +870,7 @@ impl<'a, 'tcx> CacheEncoder<'a, 'tcx> {
((end_pos - start_pos) as u64).encode(self);
}

#[inline]
fn finish(self) -> Result<usize, io::Error> {
self.encoder.finish()
}

|
|||
type I = TyCtxt<'tcx>;
|
||||
const CLEAR_CROSS_CRATE: bool = false;
|
||||
|
||||
#[inline]
|
||||
fn position(&self) -> usize {
|
||||
self.encoder.position()
|
||||
}
|
||||
#[inline]
|
||||
fn type_shorthands(&mut self) -> &mut FxHashMap<Ty<'tcx>, usize> {
|
||||
&mut self.type_shorthands
|
||||
}
|
||||
#[inline]
|
||||
fn predicate_shorthands(&mut self) -> &mut FxHashMap<ty::PredicateKind<'tcx>, usize> {
|
||||
&mut self.predicate_shorthands
|
||||
}
|
||||
#[inline]
|
||||
fn encode_alloc_id(&mut self, alloc_id: &interpret::AllocId) {
|
||||
let (index, _) = self.interpret_allocs.insert_full(*alloc_id);
|
||||
|
||||
|
@ -974,12 +984,14 @@ impl<'a, 'tcx> TyEncoder for CacheEncoder<'a, 'tcx> {
|
|||
}
|
||||
|
||||
impl<'a, 'tcx> Encodable<CacheEncoder<'a, 'tcx>> for CrateNum {
|
||||
#[inline]
|
||||
fn encode(&self, s: &mut CacheEncoder<'a, 'tcx>) {
|
||||
s.tcx.stable_crate_id(*self).encode(s);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> Encodable<CacheEncoder<'a, 'tcx>> for DefId {
|
||||
#[inline]
|
||||
fn encode(&self, s: &mut CacheEncoder<'a, 'tcx>) {
|
||||
s.tcx.def_path_hash(*self).encode(s);
|
||||
}
|
||||
|
@ -1032,33 +1044,3 @@ impl<'a, 'tcx> Encodable<CacheEncoder<'a, 'tcx>> for [u8] {
|
|||
self.encode(&mut e.encoder);
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn encode_query_results<'a, 'tcx, Q>(
|
||||
query: Q,
|
||||
qcx: QueryCtxt<'tcx>,
|
||||
encoder: &mut CacheEncoder<'a, 'tcx>,
|
||||
query_result_index: &mut EncodedDepNodeIndex,
|
||||
) where
|
||||
Q: super::QueryConfigRestored<'tcx>,
|
||||
Q::RestoredValue: Encodable<CacheEncoder<'a, 'tcx>>,
|
||||
{
|
||||
let _timer = qcx
|
||||
.tcx
|
||||
.profiler()
|
||||
.verbose_generic_activity_with_arg("encode_query_results_for", query.name());
|
||||
|
||||
assert!(query.query_state(qcx).all_inactive());
|
||||
let cache = query.query_cache(qcx);
|
||||
cache.iter(&mut |key, value, dep_node| {
|
||||
if query.cache_on_disk(qcx.tcx, &key) {
|
||||
let dep_node = SerializedDepNodeIndex::new(dep_node.index());
|
||||
|
||||
// Record position of the cache entry.
|
||||
query_result_index.push((dep_node, AbsoluteBytePos::new(encoder.encoder.position())));
|
||||
|
||||
// Encode the type check tables with the `SerializedDepNodeIndex`
|
||||
// as tag.
|
||||
encoder.encode_tagged(dep_node, &Q::restore(*value));
|
||||
}
|
||||
});
|
||||
}
|
|
@@ -500,7 +500,6 @@ impl_arena_copy_decoder! {<'tcx>
macro_rules! implement_ty_decoder {
($DecoderName:ident <$($typaram:tt),*>) => {
mod __ty_decoder_impl {
use std::borrow::Cow;
use rustc_serialize::Decoder;

use super::$DecoderName;

@@ -14,11 +14,14 @@ use crate::middle::resolve_bound_vars;
use crate::middle::stability;
use crate::mir::interpret::{self, Allocation, ConstAllocation};
use crate::mir::{Body, Local, Place, PlaceElem, ProjectionKind, Promoted};
use crate::query::on_disk_cache::OnDiskCache;
use crate::query::LocalCrate;
use crate::thir::Thir;
use crate::traits;
use crate::traits::solve;
use crate::traits::solve::{ExternalConstraints, ExternalConstraintsData};
use crate::ty::query::QuerySystem;
use crate::ty::query::QuerySystemFns;
use crate::ty::query::{self, TyCtxtAt};
use crate::ty::{
self, AdtDef, AdtDefData, AdtKind, Binder, Const, ConstData, FloatTy, FloatVar, FloatVid,

@@ -31,7 +34,6 @@ use rustc_ast::{self as ast, attr};
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::intern::Interned;
use rustc_data_structures::memmap::Mmap;
use rustc_data_structures::profiling::SelfProfilerRef;
use rustc_data_structures::sharded::{IntoPointer, ShardedHashMap};
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};

@@ -61,7 +63,6 @@ use rustc_session::lint::Lint;
use rustc_session::Limit;
use rustc_session::Session;
use rustc_span::def_id::{DefPathHash, StableCrateId};
use rustc_span::source_map::SourceMap;
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::{Span, DUMMY_SP};
use rustc_target::abi::{FieldIdx, Layout, LayoutS, TargetDataLayout, VariantIdx};

@@ -84,21 +85,6 @@ use super::query::IntoQueryParam;

const TINY_CONST_EVAL_LIMIT: Limit = Limit(20);

pub trait OnDiskCache<'tcx>: rustc_data_structures::sync::Sync {
/// Creates a new `OnDiskCache` instance from the serialized data in `data`.
fn new(sess: &'tcx Session, data: Mmap, start_pos: usize) -> Self
where
Self: Sized;

fn new_empty(source_map: &'tcx SourceMap) -> Self
where
Self: Sized;

fn drop_serialized_data(&self, tcx: TyCtxt<'tcx>);

fn serialize(&self, tcx: TyCtxt<'tcx>, encoder: FileEncoder) -> FileEncodeResult;
}

#[allow(rustc::usage_of_ty_tykind)]
impl<'tcx> Interner for TyCtxt<'tcx> {
type AdtDef = ty::AdtDef<'tcx>;

@ -527,13 +513,6 @@ pub struct GlobalCtxt<'tcx> {
|
|||
|
||||
untracked: Untracked,
|
||||
|
||||
/// This provides access to the incremental compilation on-disk cache for query results.
|
||||
/// Do not access this directly. It is only meant to be used by
|
||||
/// `DepGraph::try_mark_green()` and the query infrastructure.
|
||||
/// This is `None` if we are not incremental compilation mode
|
||||
pub on_disk_cache: Option<&'tcx dyn OnDiskCache<'tcx>>,
|
||||
|
||||
pub queries: &'tcx dyn query::QueryEngine<'tcx>,
|
||||
pub query_system: query::QuerySystem<'tcx>,
|
||||
pub(crate) query_kinds: &'tcx [DepKindStruct<'tcx>],
|
||||
|
||||
|
@ -674,9 +653,9 @@ impl<'tcx> TyCtxt<'tcx> {
|
|||
hir_arena: &'tcx WorkerLocal<hir::Arena<'tcx>>,
|
||||
untracked: Untracked,
|
||||
dep_graph: DepGraph,
|
||||
on_disk_cache: Option<&'tcx dyn OnDiskCache<'tcx>>,
|
||||
queries: &'tcx dyn query::QueryEngine<'tcx>,
|
||||
on_disk_cache: Option<OnDiskCache<'tcx>>,
|
||||
query_kinds: &'tcx [DepKindStruct<'tcx>],
|
||||
query_system_fns: QuerySystemFns<'tcx>,
|
||||
) -> GlobalCtxt<'tcx> {
|
||||
let data_layout = s.target.parse_data_layout().unwrap_or_else(|err| {
|
||||
s.emit_fatal(err);
|
||||
|
@ -698,9 +677,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
|||
lifetimes: common_lifetimes,
|
||||
consts: common_consts,
|
||||
untracked,
|
||||
on_disk_cache,
|
||||
queries,
|
||||
query_system: Default::default(),
|
||||
query_system: QuerySystem::new(query_system_fns, on_disk_cache),
|
||||
query_kinds,
|
||||
ty_rcache: Default::default(),
|
||||
pred_rcache: Default::default(),
|
||||
|
@ -1039,7 +1016,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
|||
}
|
||||
|
||||
pub fn serialize_query_result_cache(self, encoder: FileEncoder) -> FileEncodeResult {
|
||||
self.on_disk_cache.as_ref().map_or(Ok(0), |c| c.serialize(self, encoder))
|
||||
self.query_system.on_disk_cache.as_ref().map_or(Ok(0), |c| c.serialize(self, encoder))
|
||||
}
|
||||
|
||||
/// If `true`, we should use lazy normalization for constants, otherwise
@ -84,8 +84,7 @@ pub use self::consts::{
|
|||
Const, ConstData, ConstInt, ConstKind, Expr, InferConst, ScalarInt, UnevaluatedConst, ValTree,
|
||||
};
|
||||
pub use self::context::{
|
||||
tls, CtxtInterners, DeducedParamAttrs, FreeRegionInfo, GlobalCtxt, Lift, OnDiskCache, TyCtxt,
|
||||
TyCtxtFeed,
|
||||
tls, CtxtInterners, DeducedParamAttrs, FreeRegionInfo, GlobalCtxt, Lift, TyCtxt, TyCtxtFeed,
|
||||
};
|
||||
pub use self::instance::{Instance, InstanceDef, ShortInstance, UnusedGenericParams};
|
||||
pub use self::list::List;
@ -1,6 +1,7 @@
|
|||
#![allow(unused_parens)]
|
||||
|
||||
use crate::dep_graph;
|
||||
use crate::dep_graph::DepKind;
|
||||
use crate::infer::canonical::{self, Canonical};
|
||||
use crate::lint::LintExpectation;
|
||||
use crate::metadata::ModChild;
|
||||
|
@ -17,7 +18,11 @@ use crate::mir::interpret::{
|
|||
};
|
||||
use crate::mir::interpret::{LitToConstError, LitToConstInput};
|
||||
use crate::mir::mono::CodegenUnit;
|
||||
|
||||
use crate::query::erase::{erase, restore, Erase};
|
||||
use crate::query::on_disk_cache::CacheEncoder;
|
||||
use crate::query::on_disk_cache::EncodedDepNodeIndex;
|
||||
use crate::query::on_disk_cache::OnDiskCache;
|
||||
use crate::query::{AsLocalKey, Key};
|
||||
use crate::thir;
|
||||
use crate::traits::query::{
|
||||
|
@ -38,13 +43,16 @@ use crate::ty::subst::{GenericArg, SubstsRef};
|
|||
use crate::ty::util::AlwaysRequiresDrop;
|
||||
use crate::ty::GeneratorDiagnosticData;
|
||||
use crate::ty::{self, CrateInherentImpls, ParamEnvAnd, Ty, TyCtxt, UnusedGenericParams};
|
||||
use measureme::StringId;
|
||||
use rustc_arena::TypedArena;
|
||||
use rustc_ast as ast;
|
||||
use rustc_ast::expand::allocator::AllocatorKind;
|
||||
use rustc_attr as attr;
|
||||
use rustc_data_structures::fingerprint::Fingerprint;
|
||||
use rustc_data_structures::fx::{FxHashMap, FxIndexMap, FxIndexSet};
|
||||
use rustc_data_structures::steal::Steal;
|
||||
use rustc_data_structures::svh::Svh;
|
||||
use rustc_data_structures::sync::AtomicU64;
|
||||
use rustc_data_structures::sync::Lrc;
|
||||
use rustc_data_structures::sync::WorkerLocal;
|
||||
use rustc_data_structures::unord::UnordSet;
|
||||
|
@ -58,6 +66,7 @@ use rustc_hir::hir_id::OwnerId;
|
|||
use rustc_hir::lang_items::{LangItem, LanguageItems};
|
||||
use rustc_hir::{Crate, ItemLocalId, TraitCandidate};
|
||||
use rustc_index::IndexVec;
|
||||
use rustc_query_system::ich::StableHashingContext;
|
||||
pub(crate) use rustc_query_system::query::QueryJobId;
|
||||
use rustc_query_system::query::*;
|
||||
use rustc_session::config::{EntryFnType, OptLevel, OutputFilenames, SymbolManglingVersion};
|
||||
|
@ -76,17 +85,70 @@ use std::ops::Deref;
|
|||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use rustc_data_structures::fingerprint::Fingerprint;
|
||||
use rustc_query_system::ich::StableHashingContext;
|
||||
pub struct QueryKeyStringCache {
|
||||
pub def_id_cache: FxHashMap<DefId, StringId>,
|
||||
}
|
||||
|
||||
impl QueryKeyStringCache {
|
||||
pub fn new() -> QueryKeyStringCache {
|
||||
QueryKeyStringCache { def_id_cache: Default::default() }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy)]
|
||||
pub struct QueryStruct<'tcx> {
|
||||
pub try_collect_active_jobs: fn(TyCtxt<'tcx>, &mut QueryMap<DepKind>) -> Option<()>,
|
||||
pub alloc_self_profile_query_strings: fn(TyCtxt<'tcx>, &mut QueryKeyStringCache),
|
||||
pub encode_query_results:
|
||||
Option<fn(TyCtxt<'tcx>, &mut CacheEncoder<'_, 'tcx>, &mut EncodedDepNodeIndex)>,
|
||||
}
|
||||
|
||||
pub struct QuerySystemFns<'tcx> {
|
||||
pub engine: QueryEngine,
|
||||
pub local_providers: Providers,
|
||||
pub extern_providers: ExternProviders,
|
||||
pub query_structs: Vec<QueryStruct<'tcx>>,
|
||||
pub encode_query_results: fn(
|
||||
tcx: TyCtxt<'tcx>,
|
||||
encoder: &mut CacheEncoder<'_, 'tcx>,
|
||||
query_result_index: &mut EncodedDepNodeIndex,
|
||||
),
|
||||
pub try_mark_green: fn(tcx: TyCtxt<'tcx>, dep_node: &dep_graph::DepNode) -> bool,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct QuerySystem<'tcx> {
|
||||
pub states: QueryStates<'tcx>,
|
||||
pub arenas: QueryArenas<'tcx>,
|
||||
pub caches: QueryCaches<'tcx>,
|
||||
|
||||
/// This provides access to the incremental compilation on-disk cache for query results.
|
||||
/// Do not access this directly. It is only meant to be used by
|
||||
/// `DepGraph::try_mark_green()` and the query infrastructure.
|
||||
/// This is `None` if we are not in incremental compilation mode
|
||||
pub on_disk_cache: Option<OnDiskCache<'tcx>>,
|
||||
|
||||
pub fns: QuerySystemFns<'tcx>,
|
||||
|
||||
pub jobs: AtomicU64,
|
||||
|
||||
// Since we erase query value types we tell the typesystem about them with `PhantomData`.
|
||||
_phantom_values: QueryPhantomValues<'tcx>,
|
||||
}
|
||||
|
||||
impl<'tcx> QuerySystem<'tcx> {
|
||||
pub fn new(fns: QuerySystemFns<'tcx>, on_disk_cache: Option<OnDiskCache<'tcx>>) -> Self {
|
||||
QuerySystem {
|
||||
states: Default::default(),
|
||||
arenas: Default::default(),
|
||||
caches: Default::default(),
|
||||
on_disk_cache,
|
||||
fns,
|
||||
jobs: AtomicU64::new(1),
|
||||
_phantom_values: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
pub struct TyCtxtAt<'tcx> {
|
||||
pub tcx: TyCtxt<'tcx>,
|
||||
|
@ -136,7 +198,41 @@ impl<'tcx> TyCtxt<'tcx> {
|
|||
}
|
||||
|
||||
pub fn try_mark_green(self, dep_node: &dep_graph::DepNode) -> bool {
|
||||
self.queries.try_mark_green(self, dep_node)
|
||||
(self.query_system.fns.try_mark_green)(self, dep_node)
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn query_get_at<'tcx, Cache>(
|
||||
tcx: TyCtxt<'tcx>,
|
||||
execute_query: fn(TyCtxt<'tcx>, Span, Cache::Key, QueryMode) -> Option<Cache::Value>,
|
||||
query_cache: &Cache,
|
||||
span: Span,
|
||||
key: Cache::Key,
|
||||
) -> Cache::Value
|
||||
where
|
||||
Cache: QueryCache,
|
||||
{
|
||||
let key = key.into_query_param();
|
||||
match try_get_cached(tcx, query_cache, &key) {
|
||||
Some(value) => value,
|
||||
None => execute_query(tcx, span, key, QueryMode::Get).unwrap(),
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn query_ensure<'tcx, Cache>(
|
||||
tcx: TyCtxt<'tcx>,
|
||||
execute_query: fn(TyCtxt<'tcx>, Span, Cache::Key, QueryMode) -> Option<Cache::Value>,
|
||||
query_cache: &Cache,
|
||||
key: Cache::Key,
|
||||
check_cache: bool,
|
||||
) where
|
||||
Cache: QueryCache,
|
||||
{
|
||||
let key = key.into_query_param();
|
||||
if try_get_cached(tcx, query_cache, &key).is_none() {
|
||||
execute_query(tcx, DUMMY_SP, key, QueryMode::Ensure { check_cache });
|
||||
}
|
||||
}
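(Editorial sketch, not rustc's actual API: the two helpers above both follow a plain cache-then-execute pattern. The names `SimpleCache` and `get_or_compute` below are made up for illustration.)

    use std::collections::HashMap;
    use std::hash::Hash;

    // Hypothetical stand-in for a query cache: look up first, compute and store on a miss.
    struct SimpleCache<K, V> {
        map: HashMap<K, V>,
    }

    impl<K: Hash + Eq + Clone, V: Clone> SimpleCache<K, V> {
        fn new() -> Self {
            SimpleCache { map: HashMap::new() }
        }

        // Mirrors the shape of `query_get_at`: return the cached value if present,
        // otherwise run `execute` and remember its result.
        fn get_or_compute(&mut self, key: K, execute: impl FnOnce(&K) -> V) -> V {
            if let Some(v) = self.map.get(&key) {
                return v.clone();
            }
            let v = execute(&key);
            self.map.insert(key, v.clone());
            v
        }
    }

    fn main() {
        let mut cache = SimpleCache::new();
        let first = cache.get_or_compute(3_u32, |k| k * 2); // computed
        let second = cache.get_or_compute(3_u32, |_| unreachable!()); // served from cache
        assert_eq!((first, second), (6, 6));
    }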
@ -345,17 +441,13 @@ macro_rules! define_callbacks {
|
|||
$($(#[$attr])*
|
||||
#[inline(always)]
|
||||
pub fn $name(self, key: query_helper_param_ty!($($K)*)) {
|
||||
let key = key.into_query_param();
|
||||
|
||||
match try_get_cached(self.tcx, &self.tcx.query_system.caches.$name, &key) {
|
||||
Some(_) => return,
|
||||
None => self.tcx.queries.$name(
|
||||
self.tcx,
|
||||
DUMMY_SP,
|
||||
key,
|
||||
QueryMode::Ensure { check_cache: false },
|
||||
),
|
||||
};
|
||||
query_ensure(
|
||||
self.tcx,
|
||||
self.tcx.query_system.fns.engine.$name,
|
||||
&self.tcx.query_system.caches.$name,
|
||||
key.into_query_param(),
|
||||
false,
|
||||
);
|
||||
})*
|
||||
}
|
||||
|
||||
|
@ -363,17 +455,13 @@ macro_rules! define_callbacks {
|
|||
$($(#[$attr])*
|
||||
#[inline(always)]
|
||||
pub fn $name(self, key: query_helper_param_ty!($($K)*)) {
|
||||
let key = key.into_query_param();
|
||||
|
||||
match try_get_cached(self.tcx, &self.tcx.query_system.caches.$name, &key) {
|
||||
Some(_) => return,
|
||||
None => self.tcx.queries.$name(
|
||||
self.tcx,
|
||||
DUMMY_SP,
|
||||
key,
|
||||
QueryMode::Ensure { check_cache: true },
|
||||
),
|
||||
};
|
||||
query_ensure(
|
||||
self.tcx,
|
||||
self.tcx.query_system.fns.engine.$name,
|
||||
&self.tcx.query_system.caches.$name,
|
||||
key.into_query_param(),
|
||||
true,
|
||||
);
|
||||
})*
|
||||
}
|
||||
|
||||
|
@ -392,15 +480,23 @@ macro_rules! define_callbacks {
|
|||
#[inline(always)]
|
||||
pub fn $name(self, key: query_helper_param_ty!($($K)*)) -> $V
|
||||
{
|
||||
let key = key.into_query_param();
|
||||
|
||||
restore::<$V>(match try_get_cached(self.tcx, &self.tcx.query_system.caches.$name, &key) {
|
||||
Some(value) => value,
|
||||
None => self.tcx.queries.$name(self.tcx, self.span, key, QueryMode::Get).unwrap(),
|
||||
})
|
||||
restore::<$V>(query_get_at(
|
||||
self.tcx,
|
||||
self.tcx.query_system.fns.engine.$name,
|
||||
&self.tcx.query_system.caches.$name,
|
||||
self.span,
|
||||
key.into_query_param(),
|
||||
))
|
||||
})*
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct QueryStates<'tcx> {
|
||||
$(
|
||||
pub $name: QueryState<$($K)*, DepKind>,
|
||||
)*
|
||||
}
|
||||
|
||||
pub struct Providers {
|
||||
$(pub $name: for<'tcx> fn(
|
||||
TyCtxt<'tcx>,
|
||||
|
@ -446,19 +542,13 @@ macro_rules! define_callbacks {
|
|||
fn clone(&self) -> Self { *self }
|
||||
}
|
||||
|
||||
pub trait QueryEngine<'tcx>: rustc_data_structures::sync::Sync {
|
||||
fn as_any(&'tcx self) -> &'tcx dyn std::any::Any;
|
||||
|
||||
fn try_mark_green(&'tcx self, tcx: TyCtxt<'tcx>, dep_node: &dep_graph::DepNode) -> bool;
|
||||
|
||||
$($(#[$attr])*
|
||||
fn $name(
|
||||
&'tcx self,
|
||||
tcx: TyCtxt<'tcx>,
|
||||
span: Span,
|
||||
key: query_keys::$name<'tcx>,
|
||||
mode: QueryMode,
|
||||
) -> Option<Erase<$V>>;)*
|
||||
pub struct QueryEngine {
|
||||
$(pub $name: for<'tcx> fn(
|
||||
TyCtxt<'tcx>,
|
||||
Span,
|
||||
query_keys::$name<'tcx>,
|
||||
QueryMode,
|
||||
) -> Option<Erase<$V>>,)*
|
||||
}
|
||||
};
|
||||
}
@ -19,6 +19,8 @@ pub enum CallDesugaringKind {
|
|||
QuestionFromResidual,
|
||||
/// `try { ..; x }` calls `type_of(x)::from_output(x)`
|
||||
TryBlockFromOutput,
|
||||
/// `.await` calls `IntoFuture::into_future`
|
||||
Await,
|
||||
}
|
||||
|
||||
impl CallDesugaringKind {
|
||||
|
@ -29,6 +31,7 @@ impl CallDesugaringKind {
|
|||
tcx.require_lang_item(LangItem::Try, None)
|
||||
}
|
||||
Self::QuestionFromResidual => tcx.get_diagnostic_item(sym::FromResidual).unwrap(),
|
||||
Self::Await => tcx.get_diagnostic_item(sym::IntoFuture).unwrap(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -129,6 +132,8 @@ pub fn call_kind<'tcx>(
|
|||
&& fn_call_span.desugaring_kind() == Some(DesugaringKind::TryBlock)
|
||||
{
|
||||
Some((CallDesugaringKind::TryBlockFromOutput, method_substs.type_at(0)))
|
||||
} else if fn_call_span.is_desugaring(DesugaringKind::Await) {
|
||||
Some((CallDesugaringKind::Await, method_substs.type_at(0)))
|
||||
} else {
|
||||
None
|
||||
};
@ -334,9 +334,6 @@ impl<'p, 'tcx> MatchVisitor<'_, 'p, 'tcx> {
|
|||
let refutable = !is_let_irrefutable(&mut ncx, local_lint_level, tpat);
|
||||
Some((expr.span, refutable))
|
||||
}
|
||||
ExprKind::LogicalOp { op: LogicalOp::And, .. } => {
|
||||
bug!()
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
};
@ -56,8 +56,11 @@ impl<'tcx> Visitor<'tcx> for PackedRefChecker<'_, 'tcx> {
|
|||
"reference to packed field is unaligned"
|
||||
)
|
||||
.note(
|
||||
"fields of packed structs are not properly aligned, and creating \
|
||||
a misaligned reference is undefined behavior (even if that \
|
||||
"packed structs are only aligned by one byte, and many modern architectures \
|
||||
penalize unaligned field accesses"
|
||||
)
|
||||
.note(
|
||||
"creating a misaligned reference is undefined behavior (even if that \
|
||||
reference is never dereferenced)",
|
||||
).help(
|
||||
"copy the field contents to a local variable, or replace the \
|
||||
|
|
|
@ -616,13 +616,10 @@ fn promoted_mir(tcx: TyCtxt<'_>, def: LocalDefId) -> &IndexVec<Promoted, Body<'_
|
|||
return tcx.arena.alloc(IndexVec::new());
|
||||
}
|
||||
|
||||
let tainted_by_errors = tcx.mir_borrowck(def).tainted_by_errors;
|
||||
tcx.ensure_with_value().mir_borrowck(def);
|
||||
let mut promoted = tcx.mir_promoted(def).1.steal();
|
||||
|
||||
for body in &mut promoted {
|
||||
if let Some(error_reported) = tainted_by_errors {
|
||||
body.tainted_by_errors = Some(error_reported);
|
||||
}
|
||||
run_analysis_to_runtime_passes(tcx, body);
|
||||
}
@ -179,6 +179,29 @@ impl<'tcx> MirPass<'tcx> for LowerIntrinsics {
|
|||
}
|
||||
}
|
||||
}
|
||||
sym::write_via_move => {
|
||||
let target = target.unwrap();
|
||||
let Ok([ptr, val]) = <[_; 2]>::try_from(std::mem::take(args)) else {
|
||||
span_bug!(
|
||||
terminator.source_info.span,
|
||||
"Wrong number of arguments for write_via_move intrinsic",
|
||||
);
|
||||
};
|
||||
let derefed_place =
|
||||
if let Some(place) = ptr.place() && let Some(local) = place.as_local() {
|
||||
tcx.mk_place_deref(local.into())
|
||||
} else {
|
||||
span_bug!(terminator.source_info.span, "Only passing a local is supported");
|
||||
};
|
||||
block.statements.push(Statement {
|
||||
source_info: terminator.source_info,
|
||||
kind: StatementKind::Assign(Box::new((
|
||||
derefed_place,
|
||||
Rvalue::Use(val),
|
||||
))),
|
||||
});
|
||||
terminator.kind = TerminatorKind::Goto { target };
|
||||
}
|
||||
sym::discriminant_value => {
|
||||
if let (Some(target), Some(arg)) = (*target, args[0].place()) {
|
||||
let arg = tcx.mk_place_deref(arg);
@ -1646,7 +1646,7 @@ impl<'a> Parser<'a> {
|
|||
// Avoid knock-down errors as we don't know whether to interpret this as `foo().await?`
|
||||
// or `foo()?.await` (the very reason we went with postfix syntax 😅).
|
||||
ExprKind::Try(_) => ExprKind::Err,
|
||||
_ => ExprKind::Await(expr),
|
||||
_ => ExprKind::Await(expr, await_sp),
|
||||
};
|
||||
let expr = self.mk_expr(lo.to(sp), kind);
|
||||
self.maybe_recover_from_bad_qpath(expr)
@ -859,7 +859,7 @@ impl<'a> Parser<'a> {
|
|||
ExprKind::Field(_, _) => "a field access",
|
||||
ExprKind::MethodCall(_) => "a method call",
|
||||
ExprKind::Call(_, _) => "a function call",
|
||||
ExprKind::Await(_) => "`.await`",
|
||||
ExprKind::Await(_, _) => "`.await`",
|
||||
ExprKind::Err => return Ok(with_postfix),
|
||||
_ => unreachable!("parse_dot_or_call_expr_with_ shouldn't produce this"),
|
||||
}
|
||||
|
@ -3252,7 +3252,7 @@ impl<'a> Parser<'a> {
|
|||
|
||||
fn mk_await_expr(&mut self, self_arg: P<Expr>, lo: Span) -> P<Expr> {
|
||||
let span = lo.to(self.prev_token.span);
|
||||
let await_expr = self.mk_expr(span, ExprKind::Await(self_arg));
|
||||
let await_expr = self.mk_expr(span, ExprKind::Await(self_arg, self.prev_token.span));
|
||||
self.recover_from_await_method_call();
|
||||
await_expr
|
||||
}
@ -11,12 +11,10 @@
|
|||
#![deny(rustc::untranslatable_diagnostic)]
|
||||
#![deny(rustc::diagnostic_outside_of_impl)]
|
||||
|
||||
#[macro_use]
|
||||
extern crate rustc_macros;
|
||||
#[macro_use]
|
||||
extern crate rustc_middle;
|
||||
|
||||
use rustc_data_structures::sync::AtomicU64;
|
||||
use crate::plumbing::{encode_all_query_results, try_mark_green};
|
||||
use rustc_middle::arena::Arena;
|
||||
use rustc_middle::dep_graph::{self, DepKind, DepKindStruct};
|
||||
use rustc_middle::query::erase::{erase, restore, Erase};
|
||||
|
@ -24,7 +22,7 @@ use rustc_middle::query::AsLocalKey;
|
|||
use rustc_middle::ty::query::{
|
||||
query_keys, query_provided, query_provided_to_value, query_storage, query_values,
|
||||
};
|
||||
use rustc_middle::ty::query::{ExternProviders, Providers, QueryEngine};
|
||||
use rustc_middle::ty::query::{ExternProviders, Providers, QueryEngine, QuerySystemFns};
|
||||
use rustc_middle::ty::TyCtxt;
|
||||
use rustc_query_system::dep_graph::SerializedDepNodeIndex;
|
||||
use rustc_query_system::Value;
|
||||
|
@ -32,15 +30,10 @@ use rustc_span::Span;
|
|||
|
||||
#[macro_use]
|
||||
mod plumbing;
|
||||
pub use plumbing::QueryCtxt;
|
||||
use rustc_query_system::query::*;
|
||||
#[cfg(parallel_compiler)]
|
||||
pub use rustc_query_system::query::{deadlock, QueryContext};
|
||||
pub use crate::plumbing::QueryCtxt;
|
||||
|
||||
pub use rustc_query_system::query::QueryConfig;
|
||||
|
||||
mod on_disk_cache;
|
||||
pub use on_disk_cache::OnDiskCache;
|
||||
use rustc_query_system::query::*;
|
||||
|
||||
mod profiling_support;
|
||||
pub use self::profiling_support::alloc_self_profile_query_strings;
|
||||
|
@ -54,9 +47,16 @@ trait QueryConfigRestored<'tcx>: QueryConfig<QueryCtxt<'tcx>> + Default {
|
|||
|
||||
rustc_query_append! { define_queries! }
|
||||
|
||||
impl<'tcx> Queries<'tcx> {
|
||||
// Force codegen in the dyn-trait transformation in this crate.
|
||||
pub fn as_dyn(&'tcx self) -> &'tcx dyn QueryEngine<'tcx> {
|
||||
self
|
||||
pub fn query_system_fns<'tcx>(
|
||||
local_providers: Providers,
|
||||
extern_providers: ExternProviders,
|
||||
) -> QuerySystemFns<'tcx> {
|
||||
QuerySystemFns {
|
||||
engine: engine(),
|
||||
local_providers,
|
||||
extern_providers,
|
||||
query_structs: make_dep_kind_array!(query_structs).to_vec(),
|
||||
encode_query_results: encode_all_query_results,
|
||||
try_mark_green: try_mark_green,
|
||||
}
|
||||
}
@ -2,35 +2,44 @@
|
|||
//! generate the actual methods on tcx which find and execute the provider,
|
||||
//! manage the caches, and so forth.
|
||||
|
||||
use crate::on_disk_cache::{CacheDecoder, CacheEncoder, EncodedDepNodeIndex};
|
||||
use crate::profiling_support::QueryKeyStringCache;
|
||||
use crate::{on_disk_cache, Queries};
|
||||
use crate::rustc_middle::dep_graph::DepContext;
|
||||
use crate::rustc_middle::ty::TyEncoder;
|
||||
use rustc_data_structures::stable_hasher::{Hash64, HashStable, StableHasher};
|
||||
use rustc_data_structures::sync::{AtomicU64, Lock};
|
||||
use rustc_errors::{Diagnostic, Handler};
|
||||
use rustc_data_structures::sync::Lock;
|
||||
use rustc_errors::Diagnostic;
|
||||
use rustc_index::Idx;
|
||||
use rustc_middle::dep_graph::{
|
||||
self, DepKind, DepKindStruct, DepNode, DepNodeIndex, SerializedDepNodeIndex,
|
||||
};
|
||||
use rustc_middle::query::on_disk_cache::AbsoluteBytePos;
|
||||
use rustc_middle::query::on_disk_cache::{CacheDecoder, CacheEncoder, EncodedDepNodeIndex};
|
||||
use rustc_middle::query::Key;
|
||||
use rustc_middle::ty::tls::{self, ImplicitCtxt};
|
||||
use rustc_middle::ty::{self, TyCtxt};
|
||||
use rustc_query_system::dep_graph::{DepNodeParams, HasDepContext};
|
||||
use rustc_query_system::ich::StableHashingContext;
|
||||
use rustc_query_system::query::{
|
||||
force_query, QueryConfig, QueryContext, QueryJobId, QueryMap, QuerySideEffects, QueryStackFrame,
|
||||
force_query, QueryCache, QueryConfig, QueryContext, QueryJobId, QueryMap, QuerySideEffects,
|
||||
QueryStackFrame,
|
||||
};
|
||||
use rustc_query_system::{LayoutOfDepth, QueryOverflow};
|
||||
use rustc_serialize::Decodable;
|
||||
use rustc_serialize::Encodable;
|
||||
use rustc_session::Limit;
|
||||
use rustc_span::def_id::LOCAL_CRATE;
|
||||
use std::any::Any;
|
||||
use std::num::NonZeroU64;
|
||||
use thin_vec::ThinVec;
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
pub struct QueryCtxt<'tcx> {
|
||||
pub tcx: TyCtxt<'tcx>,
|
||||
pub queries: &'tcx Queries<'tcx>,
|
||||
}
|
||||
|
||||
impl<'tcx> QueryCtxt<'tcx> {
|
||||
#[inline]
|
||||
pub fn new(tcx: TyCtxt<'tcx>) -> Self {
|
||||
QueryCtxt { tcx }
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> std::ops::Deref for QueryCtxt<'tcx> {
|
||||
|
@ -53,44 +62,56 @@ impl<'tcx> HasDepContext for QueryCtxt<'tcx> {
|
|||
}
|
||||
|
||||
impl QueryContext for QueryCtxt<'_> {
|
||||
#[inline]
|
||||
fn next_job_id(self) -> QueryJobId {
|
||||
QueryJobId(
|
||||
NonZeroU64::new(
|
||||
self.queries.jobs.fetch_add(1, rustc_data_structures::sync::Ordering::Relaxed),
|
||||
self.query_system.jobs.fetch_add(1, rustc_data_structures::sync::Ordering::Relaxed),
|
||||
)
|
||||
.unwrap(),
|
||||
)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn current_query_job(self) -> Option<QueryJobId> {
|
||||
tls::with_related_context(*self, |icx| icx.query)
|
||||
tls::with_related_context(self.tcx, |icx| icx.query)
|
||||
}
|
||||
|
||||
fn try_collect_active_jobs(self) -> Option<QueryMap<DepKind>> {
|
||||
self.queries.try_collect_active_jobs(*self)
|
||||
let mut jobs = QueryMap::default();
|
||||
|
||||
for query in &self.query_system.fns.query_structs {
|
||||
(query.try_collect_active_jobs)(self.tcx, &mut jobs);
|
||||
}
|
||||
|
||||
Some(jobs)
|
||||
}
|
||||
|
||||
// Interactions with on_disk_cache
|
||||
fn load_side_effects(self, prev_dep_node_index: SerializedDepNodeIndex) -> QuerySideEffects {
|
||||
self.queries
|
||||
self.query_system
|
||||
.on_disk_cache
|
||||
.as_ref()
|
||||
.map(|c| c.load_side_effects(*self, prev_dep_node_index))
|
||||
.map(|c| c.load_side_effects(self.tcx, prev_dep_node_index))
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
#[inline(never)]
|
||||
#[cold]
|
||||
fn store_side_effects(self, dep_node_index: DepNodeIndex, side_effects: QuerySideEffects) {
|
||||
if let Some(c) = self.queries.on_disk_cache.as_ref() {
|
||||
if let Some(c) = self.query_system.on_disk_cache.as_ref() {
|
||||
c.store_side_effects(dep_node_index, side_effects)
|
||||
}
|
||||
}
|
||||
|
||||
#[inline(never)]
|
||||
#[cold]
|
||||
fn store_side_effects_for_anon_node(
|
||||
self,
|
||||
dep_node_index: DepNodeIndex,
|
||||
side_effects: QuerySideEffects,
|
||||
) {
|
||||
if let Some(c) = self.queries.on_disk_cache.as_ref() {
|
||||
if let Some(c) = self.query_system.on_disk_cache.as_ref() {
|
||||
c.store_side_effects_for_anon_node(dep_node_index, side_effects)
|
||||
}
|
||||
}
|
||||
|
@ -109,14 +130,14 @@ impl QueryContext for QueryCtxt<'_> {
|
|||
// The `TyCtxt` stored in TLS has the same global interner lifetime
|
||||
// as `self`, so we use `with_related_context` to relate the 'tcx lifetimes
|
||||
// when accessing the `ImplicitCtxt`.
|
||||
tls::with_related_context(*self, move |current_icx| {
|
||||
tls::with_related_context(self.tcx, move |current_icx| {
|
||||
if depth_limit && !self.recursion_limit().value_within_limit(current_icx.query_depth) {
|
||||
self.depth_limit_error(token);
|
||||
}
|
||||
|
||||
// Update the `ImplicitCtxt` to point to our new query job.
|
||||
let new_icx = ImplicitCtxt {
|
||||
tcx: *self,
|
||||
tcx: self.tcx,
|
||||
query: Some(token),
|
||||
diagnostics,
|
||||
query_depth: current_icx.query_depth + depth_limit as usize,
|
||||
|
@ -152,51 +173,20 @@ impl QueryContext for QueryCtxt<'_> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<'tcx> QueryCtxt<'tcx> {
|
||||
#[inline]
|
||||
pub fn from_tcx(tcx: TyCtxt<'tcx>) -> Self {
|
||||
let queries = tcx.queries.as_any();
|
||||
let queries = unsafe {
|
||||
let queries = std::mem::transmute::<&dyn Any, &dyn Any>(queries);
|
||||
let queries = queries.downcast_ref().unwrap();
|
||||
let queries = std::mem::transmute::<&Queries<'_>, &Queries<'_>>(queries);
|
||||
queries
|
||||
};
|
||||
QueryCtxt { tcx, queries }
|
||||
}
|
||||
|
||||
pub(crate) fn on_disk_cache(self) -> Option<&'tcx on_disk_cache::OnDiskCache<'tcx>> {
|
||||
self.queries.on_disk_cache.as_ref()
|
||||
}
|
||||
|
||||
pub(super) fn encode_query_results(
|
||||
self,
|
||||
encoder: &mut CacheEncoder<'_, 'tcx>,
|
||||
query_result_index: &mut EncodedDepNodeIndex,
|
||||
) {
|
||||
for query in &self.queries.query_structs {
|
||||
if let Some(encode) = query.encode_query_results {
|
||||
encode(self, encoder, query_result_index);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn try_print_query_stack(
|
||||
self,
|
||||
query: Option<QueryJobId>,
|
||||
handler: &Handler,
|
||||
num_frames: Option<usize>,
|
||||
) -> usize {
|
||||
rustc_query_system::query::print_query_stack(self, query, handler, num_frames)
|
||||
}
|
||||
pub(super) fn try_mark_green<'tcx>(tcx: TyCtxt<'tcx>, dep_node: &dep_graph::DepNode) -> bool {
|
||||
tcx.dep_graph.try_mark_green(QueryCtxt::new(tcx), dep_node).is_some()
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy)]
|
||||
pub(crate) struct QueryStruct<'tcx> {
|
||||
pub try_collect_active_jobs: fn(QueryCtxt<'tcx>, &mut QueryMap<DepKind>) -> Option<()>,
|
||||
pub alloc_self_profile_query_strings: fn(TyCtxt<'tcx>, &mut QueryKeyStringCache),
|
||||
pub encode_query_results:
|
||||
Option<fn(QueryCtxt<'tcx>, &mut CacheEncoder<'_, 'tcx>, &mut EncodedDepNodeIndex)>,
|
||||
pub(super) fn encode_all_query_results<'tcx>(
|
||||
tcx: TyCtxt<'tcx>,
|
||||
encoder: &mut CacheEncoder<'_, 'tcx>,
|
||||
query_result_index: &mut EncodedDepNodeIndex,
|
||||
) {
|
||||
for query in &tcx.query_system.fns.query_structs {
|
||||
if let Some(encode) = query.encode_query_results {
|
||||
encode(tcx, encoder, query_result_index);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! handle_cycle_error {
|
||||
|
@ -276,13 +266,13 @@ macro_rules! hash_result {
|
|||
|
||||
macro_rules! call_provider {
|
||||
([][$qcx:expr, $name:ident, $key:expr]) => {{
|
||||
($qcx.queries.local_providers.$name)($qcx.tcx, $key)
|
||||
($qcx.query_system.fns.local_providers.$name)($qcx, $key)
|
||||
}};
|
||||
([(separate_provide_extern) $($rest:tt)*][$qcx:expr, $name:ident, $key:expr]) => {{
|
||||
if let Some(key) = $key.as_local_key() {
|
||||
($qcx.queries.local_providers.$name)($qcx.tcx, key)
|
||||
($qcx.query_system.fns.local_providers.$name)($qcx, key)
|
||||
} else {
|
||||
($qcx.queries.extern_providers.$name)($qcx.tcx, $key)
|
||||
($qcx.query_system.fns.extern_providers.$name)($qcx, $key)
|
||||
}
|
||||
}};
|
||||
([$other:tt $($modifiers:tt)*][$($args:tt)*]) => {
|
||||
|
@ -306,7 +296,7 @@ pub(crate) fn create_query_frame<
|
|||
'tcx,
|
||||
K: Copy + Key + for<'a> HashStable<StableHashingContext<'a>>,
|
||||
>(
|
||||
tcx: QueryCtxt<'tcx>,
|
||||
tcx: TyCtxt<'tcx>,
|
||||
do_describe: fn(TyCtxt<'tcx>, K) -> String,
|
||||
key: K,
|
||||
kind: DepKind,
|
||||
|
@ -318,7 +308,7 @@ pub(crate) fn create_query_frame<
|
|||
// Showing visible path instead of any path is not that important in production.
|
||||
ty::print::with_no_visible_paths!(
|
||||
// Force filename-line mode to avoid invoking `type_of` query.
|
||||
ty::print::with_forced_impl_filename_line!(do_describe(tcx.tcx, key))
|
||||
ty::print::with_forced_impl_filename_line!(do_describe(tcx, key))
|
||||
)
|
||||
);
|
||||
let description =
|
||||
|
@ -328,7 +318,7 @@ pub(crate) fn create_query_frame<
|
|||
// so exit to avoid infinite recursion.
|
||||
None
|
||||
} else {
|
||||
Some(key.default_span(*tcx))
|
||||
Some(key.default_span(tcx))
|
||||
};
|
||||
let def_id = key.key_as_def_id();
|
||||
let def_kind = if kind == dep_graph::DepKind::opt_def_kind {
|
||||
|
@ -350,6 +340,34 @@ pub(crate) fn create_query_frame<
|
|||
QueryStackFrame::new(description, span, def_id, def_kind, kind, ty_adt_id, hash)
|
||||
}
|
||||
|
||||
pub(crate) fn encode_query_results<'a, 'tcx, Q>(
|
||||
query: Q,
|
||||
qcx: QueryCtxt<'tcx>,
|
||||
encoder: &mut CacheEncoder<'a, 'tcx>,
|
||||
query_result_index: &mut EncodedDepNodeIndex,
|
||||
) where
|
||||
Q: super::QueryConfigRestored<'tcx>,
|
||||
Q::RestoredValue: Encodable<CacheEncoder<'a, 'tcx>>,
|
||||
{
|
||||
let _timer =
|
||||
qcx.profiler().verbose_generic_activity_with_arg("encode_query_results_for", query.name());
|
||||
|
||||
assert!(query.query_state(qcx).all_inactive());
|
||||
let cache = query.query_cache(qcx);
|
||||
cache.iter(&mut |key, value, dep_node| {
|
||||
if query.cache_on_disk(qcx.tcx, &key) {
|
||||
let dep_node = SerializedDepNodeIndex::new(dep_node.index());
|
||||
|
||||
// Record position of the cache entry.
|
||||
query_result_index.push((dep_node, AbsoluteBytePos::new(encoder.position())));
|
||||
|
||||
// Encode the type check tables with the `SerializedDepNodeIndex`
|
||||
// as tag.
|
||||
encoder.encode_tagged(dep_node, &Q::restore(*value));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
fn try_load_from_on_disk_cache<'tcx, Q>(query: Q, tcx: TyCtxt<'tcx>, dep_node: DepNode)
|
||||
where
|
||||
Q: QueryConfig<QueryCtxt<'tcx>>,
|
||||
|
@ -364,8 +382,8 @@ where
|
|||
}
|
||||
}
|
||||
|
||||
pub(crate) fn loadable_from_disk<'tcx>(tcx: QueryCtxt<'tcx>, id: SerializedDepNodeIndex) -> bool {
|
||||
if let Some(cache) = tcx.on_disk_cache().as_ref() {
|
||||
pub(crate) fn loadable_from_disk<'tcx>(tcx: TyCtxt<'tcx>, id: SerializedDepNodeIndex) -> bool {
|
||||
if let Some(cache) = tcx.query_system.on_disk_cache.as_ref() {
|
||||
cache.loadable_from_disk(id)
|
||||
} else {
|
||||
false
|
||||
|
@ -373,13 +391,13 @@ pub(crate) fn loadable_from_disk<'tcx>(tcx: QueryCtxt<'tcx>, id: SerializedDepNo
|
|||
}
|
||||
|
||||
pub(crate) fn try_load_from_disk<'tcx, V>(
|
||||
tcx: QueryCtxt<'tcx>,
|
||||
tcx: TyCtxt<'tcx>,
|
||||
id: SerializedDepNodeIndex,
|
||||
) -> Option<V>
|
||||
where
|
||||
V: for<'a> Decodable<CacheDecoder<'a, 'tcx>>,
|
||||
{
|
||||
tcx.on_disk_cache().as_ref()?.try_load_query_result(*tcx, id)
|
||||
tcx.query_system.on_disk_cache.as_ref()?.try_load_query_result(tcx, id)
|
||||
}
|
||||
|
||||
fn force_from_dep_node<'tcx, Q>(query: Q, tcx: TyCtxt<'tcx>, dep_node: DepNode) -> bool
|
||||
|
@ -407,8 +425,7 @@ where
|
|||
if let Some(key) = Q::Key::recover(tcx, &dep_node) {
|
||||
#[cfg(debug_assertions)]
|
||||
let _guard = tracing::span!(tracing::Level::TRACE, stringify!($name), ?key).entered();
|
||||
let tcx = QueryCtxt::from_tcx(tcx);
|
||||
force_query(query, tcx, key, dep_node);
|
||||
force_query(query, QueryCtxt::new(tcx), key, dep_node);
|
||||
true
|
||||
} else {
|
||||
false
|
||||
|
@ -461,8 +478,33 @@ macro_rules! define_queries {
|
|||
(
|
||||
$($(#[$attr:meta])*
|
||||
[$($modifiers:tt)*] fn $name:ident($($K:tt)*) -> $V:ty,)*) => {
|
||||
define_queries_struct! {
|
||||
input: ($(([$($modifiers)*] [$($attr)*] [$name]))*)
|
||||
mod get_query {
|
||||
use super::*;
|
||||
|
||||
$(
|
||||
#[inline(always)]
|
||||
#[tracing::instrument(level = "trace", skip(tcx))]
|
||||
pub(super) fn $name<'tcx>(
|
||||
tcx: TyCtxt<'tcx>,
|
||||
span: Span,
|
||||
key: query_keys::$name<'tcx>,
|
||||
mode: QueryMode,
|
||||
) -> Option<Erase<query_values::$name<'tcx>>> {
|
||||
get_query(
|
||||
queries::$name::default(),
|
||||
QueryCtxt::new(tcx),
|
||||
span,
|
||||
key,
|
||||
mode
|
||||
)
|
||||
}
|
||||
)*
|
||||
}
|
||||
|
||||
pub(crate) fn engine() -> QueryEngine {
|
||||
QueryEngine {
|
||||
$($name: get_query::$name,)*
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
|
@ -502,7 +544,7 @@ macro_rules! define_queries {
|
|||
fn query_state<'a>(self, tcx: QueryCtxt<'tcx>) -> &'a QueryState<Self::Key, crate::dep_graph::DepKind>
|
||||
where QueryCtxt<'tcx>: 'a
|
||||
{
|
||||
&tcx.queries.$name
|
||||
&tcx.query_system.states.$name
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
|
@ -521,7 +563,7 @@ macro_rules! define_queries {
|
|||
fn compute(self, qcx: QueryCtxt<'tcx>, key: Self::Key) -> Self::Value {
|
||||
query_provided_to_value::$name(
|
||||
qcx.tcx,
|
||||
call_provider!([$($modifiers)*][qcx, $name, key])
|
||||
call_provider!([$($modifiers)*][qcx.tcx, $name, key])
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -535,7 +577,7 @@ macro_rules! define_queries {
|
|||
if ::rustc_middle::query::cached::$name(_qcx.tcx, _key) {
|
||||
Some(|qcx: QueryCtxt<'tcx>, dep_node| {
|
||||
let value = $crate::plumbing::try_load_from_disk::<query_provided::$name<'tcx>>(
|
||||
qcx,
|
||||
qcx.tcx,
|
||||
dep_node
|
||||
);
|
||||
value.map(|value| query_provided_to_value::$name(qcx.tcx, value))
|
||||
|
@ -557,7 +599,7 @@ macro_rules! define_queries {
|
|||
) -> bool {
|
||||
should_ever_cache_on_disk!([$($modifiers)*] {
|
||||
self.cache_on_disk(_qcx.tcx, _key) &&
|
||||
$crate::plumbing::loadable_from_disk(_qcx, _index)
|
||||
$crate::plumbing::loadable_from_disk(_qcx.tcx, _index)
|
||||
} {
|
||||
false
|
||||
})
|
||||
|
@ -684,14 +726,13 @@ macro_rules! define_queries {
|
|||
}
|
||||
|
||||
mod query_structs {
|
||||
use rustc_middle::ty::TyCtxt;
|
||||
use $crate::plumbing::{QueryStruct, QueryCtxt};
|
||||
use $crate::profiling_support::QueryKeyStringCache;
|
||||
use rustc_query_system::query::QueryMap;
|
||||
use super::*;
|
||||
use rustc_middle::ty::query::QueryStruct;
|
||||
use rustc_middle::ty::query::QueryKeyStringCache;
|
||||
use rustc_middle::dep_graph::DepKind;
|
||||
|
||||
pub(super) const fn dummy_query_struct<'tcx>() -> QueryStruct<'tcx> {
|
||||
fn noop_try_collect_active_jobs(_: QueryCtxt<'_>, _: &mut QueryMap<DepKind>) -> Option<()> {
|
||||
fn noop_try_collect_active_jobs(_: TyCtxt<'_>, _: &mut QueryMap<DepKind>) -> Option<()> {
|
||||
None
|
||||
}
|
||||
fn noop_alloc_self_profile_query_strings(_: TyCtxt<'_>, _: &mut QueryKeyStringCache) {}
|
||||
|
@ -717,7 +758,7 @@ macro_rules! define_queries {
|
|||
let name = stringify!($name);
|
||||
$crate::plumbing::create_query_frame(tcx, rustc_middle::query::descs::$name, key, kind, name)
|
||||
};
|
||||
tcx.queries.$name.try_collect_active_jobs(
|
||||
tcx.query_system.states.$name.try_collect_active_jobs(
|
||||
tcx,
|
||||
make_query,
|
||||
qmap,
|
||||
|
@ -731,10 +772,10 @@ macro_rules! define_queries {
|
|||
string_cache,
|
||||
)
|
||||
},
|
||||
encode_query_results: expand_if_cached!([$($modifiers)*], |qcx, encoder, query_result_index|
|
||||
$crate::on_disk_cache::encode_query_results::<super::queries::$name<'tcx>>(
|
||||
encode_query_results: expand_if_cached!([$($modifiers)*], |tcx, encoder, query_result_index|
|
||||
$crate::plumbing::encode_query_results::<super::queries::$name<'tcx>>(
|
||||
super::queries::$name::default(),
|
||||
qcx,
|
||||
QueryCtxt::new(tcx),
|
||||
encoder,
|
||||
query_result_index,
|
||||
)
|
||||
|
@ -747,93 +788,3 @@ macro_rules! define_queries {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
use crate::{ExternProviders, OnDiskCache, Providers};
|
||||
|
||||
impl<'tcx> Queries<'tcx> {
|
||||
pub fn new(
|
||||
local_providers: Providers,
|
||||
extern_providers: ExternProviders,
|
||||
on_disk_cache: Option<OnDiskCache<'tcx>>,
|
||||
) -> Self {
|
||||
use crate::query_structs;
|
||||
Queries {
|
||||
local_providers: Box::new(local_providers),
|
||||
extern_providers: Box::new(extern_providers),
|
||||
query_structs: make_dep_kind_array!(query_structs).to_vec(),
|
||||
on_disk_cache,
|
||||
jobs: AtomicU64::new(1),
|
||||
..Queries::default()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! define_queries_struct {
|
||||
(
|
||||
input: ($(([$($modifiers:tt)*] [$($attr:tt)*] [$name:ident]))*)) => {
|
||||
#[derive(Default)]
|
||||
pub struct Queries<'tcx> {
|
||||
local_providers: Box<Providers>,
|
||||
extern_providers: Box<ExternProviders>,
|
||||
query_structs: Vec<$crate::plumbing::QueryStruct<'tcx>>,
|
||||
pub on_disk_cache: Option<OnDiskCache<'tcx>>,
|
||||
jobs: AtomicU64,
|
||||
|
||||
$(
|
||||
$(#[$attr])*
|
||||
$name: QueryState<
|
||||
<queries::$name<'tcx> as QueryConfig<QueryCtxt<'tcx>>>::Key,
|
||||
rustc_middle::dep_graph::DepKind,
|
||||
>,
|
||||
)*
|
||||
}
|
||||
|
||||
impl<'tcx> Queries<'tcx> {
|
||||
pub(crate) fn try_collect_active_jobs(
|
||||
&'tcx self,
|
||||
tcx: TyCtxt<'tcx>,
|
||||
) -> Option<QueryMap<rustc_middle::dep_graph::DepKind>> {
|
||||
let tcx = QueryCtxt { tcx, queries: self };
|
||||
let mut jobs = QueryMap::default();
|
||||
|
||||
for query in &self.query_structs {
|
||||
(query.try_collect_active_jobs)(tcx, &mut jobs);
|
||||
}
|
||||
|
||||
Some(jobs)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> QueryEngine<'tcx> for Queries<'tcx> {
|
||||
fn as_any(&'tcx self) -> &'tcx dyn std::any::Any {
|
||||
let this = unsafe { std::mem::transmute::<&Queries<'_>, &Queries<'_>>(self) };
|
||||
this as _
|
||||
}
|
||||
|
||||
fn try_mark_green(&'tcx self, tcx: TyCtxt<'tcx>, dep_node: &dep_graph::DepNode) -> bool {
|
||||
let qcx = QueryCtxt { tcx, queries: self };
|
||||
tcx.dep_graph.try_mark_green(qcx, dep_node).is_some()
|
||||
}
|
||||
|
||||
$($(#[$attr])*
|
||||
#[inline(always)]
|
||||
#[tracing::instrument(level = "trace", skip(self, tcx))]
|
||||
fn $name(
|
||||
&'tcx self,
|
||||
tcx: TyCtxt<'tcx>,
|
||||
span: Span,
|
||||
key: query_keys::$name<'tcx>,
|
||||
mode: QueryMode,
|
||||
) -> Option<Erase<query_values::$name<'tcx>>> {
|
||||
let qcx = QueryCtxt { tcx, queries: self };
|
||||
get_query(
|
||||
queries::$name::default(),
|
||||
qcx,
|
||||
span,
|
||||
key,
|
||||
mode
|
||||
)
|
||||
})*
|
||||
}
|
||||
};
|
||||
}
@ -1,24 +1,13 @@
|
|||
use crate::QueryCtxt;
|
||||
use measureme::{StringComponent, StringId};
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
use rustc_data_structures::profiling::SelfProfiler;
|
||||
use rustc_hir::def_id::{CrateNum, DefId, DefIndex, LocalDefId, LOCAL_CRATE};
|
||||
use rustc_hir::definitions::DefPathData;
|
||||
use rustc_middle::ty::query::QueryKeyStringCache;
|
||||
use rustc_middle::ty::TyCtxt;
|
||||
use rustc_query_system::query::QueryCache;
|
||||
use std::fmt::Debug;
|
||||
use std::io::Write;
|
||||
|
||||
pub(crate) struct QueryKeyStringCache {
|
||||
def_id_cache: FxHashMap<DefId, StringId>,
|
||||
}
|
||||
|
||||
impl QueryKeyStringCache {
|
||||
fn new() -> QueryKeyStringCache {
|
||||
QueryKeyStringCache { def_id_cache: Default::default() }
|
||||
}
|
||||
}
|
||||
|
||||
struct QueryKeyStringBuilder<'p, 'tcx> {
|
||||
profiler: &'p SelfProfiler,
|
||||
tcx: TyCtxt<'tcx>,
|
||||
|
@ -253,9 +242,8 @@ pub fn alloc_self_profile_query_strings(tcx: TyCtxt<'_>) {
|
|||
}
|
||||
|
||||
let mut string_cache = QueryKeyStringCache::new();
|
||||
let queries = QueryCtxt::from_tcx(tcx);
|
||||
|
||||
for query in &queries.queries.query_structs {
|
||||
for query in &tcx.query_system.fns.query_structs {
|
||||
(query.alloc_self_profile_query_strings)(tcx, &mut string_cache);
|
||||
}
|
||||
}
@ -236,7 +236,7 @@ pub(crate) struct CycleError<D: DepKind> {
|
|||
/// It returns the shard index and a lock guard to the shard,
|
||||
/// which will be used if the query is not in the cache and we need
|
||||
/// to compute it.
|
||||
#[inline]
|
||||
#[inline(always)]
|
||||
pub fn try_get_cached<Tcx, C>(tcx: Tcx, cache: &C, key: &C::Key) -> Option<C::Value>
|
||||
where
|
||||
C: QueryCache,
@ -656,7 +656,7 @@ impl<'a: 'ast, 'ast, 'tcx> Visitor<'ast> for LateResolutionVisitor<'a, '_, 'ast,
|
|||
fn visit_anon_const(&mut self, constant: &'ast AnonConst) {
|
||||
// We deal with repeat expressions explicitly in `resolve_expr`.
|
||||
self.with_lifetime_rib(LifetimeRibKind::AnonConst, |this| {
|
||||
this.with_lifetime_rib(LifetimeRibKind::Elided(LifetimeRes::Static), |this| {
|
||||
this.with_lifetime_rib(LifetimeRibKind::Elided(LifetimeRes::Infer), |this| {
|
||||
this.resolve_anon_const(constant, IsRepeatExpr::No);
|
||||
})
|
||||
})
|
||||
|
@ -4126,7 +4126,7 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
|
|||
ExprKind::Repeat(ref elem, ref ct) => {
|
||||
self.visit_expr(elem);
|
||||
self.with_lifetime_rib(LifetimeRibKind::AnonConst, |this| {
|
||||
this.with_lifetime_rib(LifetimeRibKind::Elided(LifetimeRes::Static), |this| {
|
||||
this.with_lifetime_rib(LifetimeRibKind::Elided(LifetimeRes::Infer), |this| {
|
||||
this.resolve_anon_const(ct, IsRepeatExpr::Yes)
|
||||
})
|
||||
});
@ -1190,6 +1190,7 @@ impl Session {
|
|||
|
||||
/// Returns the number of query threads that should be used for this
|
||||
/// compilation
|
||||
#[inline]
|
||||
pub fn threads(&self) -> usize {
|
||||
self.opts.unstable_opts.threads
|
||||
}
@ -207,6 +207,7 @@ symbols! {
|
|||
Input,
|
||||
Into,
|
||||
IntoDiagnostic,
|
||||
IntoFuture,
|
||||
IntoIterator,
|
||||
IoRead,
|
||||
IoWrite,
|
||||
|
@ -1635,6 +1636,7 @@ symbols! {
|
|||
write_bytes,
|
||||
write_macro,
|
||||
write_str,
|
||||
write_via_move,
|
||||
writeln_macro,
|
||||
x87_reg,
|
||||
xer,
@ -148,8 +148,9 @@ pub fn is_enabled(
|
|||
pub fn is_stable(name: &str) -> Result<(), AbiDisabled> {
|
||||
match name {
|
||||
// Stable
|
||||
"Rust" | "C" | "cdecl" | "stdcall" | "fastcall" | "aapcs" | "win64" | "sysv64"
|
||||
| "system" | "efiapi" => Ok(()),
|
||||
"Rust" | "C" | "C-unwind" | "cdecl" | "cdecl-unwind" | "stdcall" | "stdcall-unwind"
|
||||
| "fastcall" | "fastcall-unwind" | "aapcs" | "aapcs-unwind" | "win64" | "win64-unwind"
|
||||
| "sysv64" | "sysv64-unwind" | "system" | "system-unwind" | "efiapi" => Ok(()),
|
||||
"rust-intrinsic" => Err(AbiDisabled::Unstable {
|
||||
feature: sym::intrinsics,
|
||||
explain: "intrinsics are subject to change",
|
||||
|
@ -162,10 +163,18 @@ pub fn is_stable(name: &str) -> Result<(), AbiDisabled> {
|
|||
feature: sym::abi_vectorcall,
|
||||
explain: "vectorcall is experimental and subject to change",
|
||||
}),
|
||||
"vectorcall-unwind" => Err(AbiDisabled::Unstable {
|
||||
feature: sym::abi_vectorcall,
|
||||
explain: "vectorcall-unwind ABI is experimental and subject to change",
|
||||
}),
|
||||
"thiscall" => Err(AbiDisabled::Unstable {
|
||||
feature: sym::abi_thiscall,
|
||||
explain: "thiscall is experimental and subject to change",
|
||||
}),
|
||||
"thiscall-unwind" => Err(AbiDisabled::Unstable {
|
||||
feature: sym::abi_thiscall,
|
||||
explain: "thiscall-unwind ABI is experimental and subject to change",
|
||||
}),
|
||||
"rust-call" => Err(AbiDisabled::Unstable {
|
||||
feature: sym::unboxed_closures,
|
||||
explain: "rust-call ABI is subject to change",
|
||||
|
@ -202,46 +211,6 @@ pub fn is_stable(name: &str) -> Result<(), AbiDisabled> {
|
|||
feature: sym::abi_c_cmse_nonsecure_call,
|
||||
explain: "C-cmse-nonsecure-call ABI is experimental and subject to change",
|
||||
}),
|
||||
"C-unwind" => Err(AbiDisabled::Unstable {
|
||||
feature: sym::c_unwind,
|
||||
explain: "C-unwind ABI is experimental and subject to change",
|
||||
}),
|
||||
"stdcall-unwind" => Err(AbiDisabled::Unstable {
|
||||
feature: sym::c_unwind,
|
||||
explain: "stdcall-unwind ABI is experimental and subject to change",
|
||||
}),
|
||||
"system-unwind" => Err(AbiDisabled::Unstable {
|
||||
feature: sym::c_unwind,
|
||||
explain: "system-unwind ABI is experimental and subject to change",
|
||||
}),
|
||||
"thiscall-unwind" => Err(AbiDisabled::Unstable {
|
||||
feature: sym::c_unwind,
|
||||
explain: "thiscall-unwind ABI is experimental and subject to change",
|
||||
}),
|
||||
"cdecl-unwind" => Err(AbiDisabled::Unstable {
|
||||
feature: sym::c_unwind,
|
||||
explain: "cdecl-unwind ABI is experimental and subject to change",
|
||||
}),
|
||||
"fastcall-unwind" => Err(AbiDisabled::Unstable {
|
||||
feature: sym::c_unwind,
|
||||
explain: "fastcall-unwind ABI is experimental and subject to change",
|
||||
}),
|
||||
"vectorcall-unwind" => Err(AbiDisabled::Unstable {
|
||||
feature: sym::c_unwind,
|
||||
explain: "vectorcall-unwind ABI is experimental and subject to change",
|
||||
}),
|
||||
"aapcs-unwind" => Err(AbiDisabled::Unstable {
|
||||
feature: sym::c_unwind,
|
||||
explain: "aapcs-unwind ABI is experimental and subject to change",
|
||||
}),
|
||||
"win64-unwind" => Err(AbiDisabled::Unstable {
|
||||
feature: sym::c_unwind,
|
||||
explain: "win64-unwind ABI is experimental and subject to change",
|
||||
}),
|
||||
"sysv64-unwind" => Err(AbiDisabled::Unstable {
|
||||
feature: sym::c_unwind,
|
||||
explain: "sysv64-unwind ABI is experimental and subject to change",
|
||||
}),
|
||||
"wasm" => Err(AbiDisabled::Unstable {
|
||||
feature: sym::wasm_abi,
|
||||
explain: "wasm ABI is experimental and subject to change",
@ -1583,55 +1583,68 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
|
|||
}
|
||||
|
||||
fn suggest_remove_await(&self, obligation: &PredicateObligation<'tcx>, err: &mut Diagnostic) {
|
||||
let span = obligation.cause.span;
|
||||
let hir = self.tcx.hir();
|
||||
if let ObligationCauseCode::AwaitableExpr(Some(hir_id)) = obligation.cause.code().peel_derives()
|
||||
&& let hir::Node::Expr(expr) = hir.get(*hir_id)
|
||||
{
|
||||
// FIXME: use `obligation.predicate.kind()...trait_ref.self_ty()` to see if we have `()`
|
||||
// and if not maybe suggest doing something else? If we kept the expression around we
|
||||
// could also check if it is an fn call (very likely) and suggest changing *that*, if
|
||||
// it is from the local crate.
|
||||
|
||||
if let ObligationCauseCode::AwaitableExpr(hir_id) = obligation.cause.code().peel_derives() {
|
||||
let hir = self.tcx.hir();
|
||||
if let Some(hir::Node::Expr(expr)) = hir_id.and_then(|hir_id| hir.find(hir_id)) {
|
||||
// FIXME: use `obligation.predicate.kind()...trait_ref.self_ty()` to see if we have `()`
|
||||
// and if not maybe suggest doing something else? If we kept the expression around we
|
||||
// could also check if it is an fn call (very likely) and suggest changing *that*, if
|
||||
// it is from the local crate.
|
||||
// use nth(1) to skip one layer of desugaring from `IntoIter::into_iter`
|
||||
if let Some((_, hir::Node::Expr(await_expr))) = hir.parent_iter(*hir_id).nth(1)
|
||||
&& let Some(expr_span) = expr.span.find_ancestor_inside(await_expr.span)
|
||||
{
|
||||
let removal_span = self.tcx
|
||||
.sess
|
||||
.source_map()
|
||||
.span_extend_while(expr_span, char::is_whitespace)
|
||||
.unwrap_or(expr_span)
|
||||
.shrink_to_hi()
|
||||
.to(await_expr.span.shrink_to_hi());
|
||||
err.span_suggestion(
|
||||
span,
|
||||
removal_span,
|
||||
"remove the `.await`",
|
||||
"",
|
||||
Applicability::MachineApplicable,
|
||||
);
|
||||
// FIXME: account for associated `async fn`s.
|
||||
if let hir::Expr { span, kind: hir::ExprKind::Call(base, _), .. } = expr {
|
||||
if let ty::PredicateKind::Clause(ty::Clause::Trait(pred)) =
|
||||
obligation.predicate.kind().skip_binder()
|
||||
{
|
||||
err.span_label(*span, &format!("this call returns `{}`", pred.self_ty()));
|
||||
}
|
||||
if let Some(typeck_results) = &self.typeck_results
|
||||
&& let ty = typeck_results.expr_ty_adjusted(base)
|
||||
&& let ty::FnDef(def_id, _substs) = ty.kind()
|
||||
&& let Some(hir::Node::Item(hir::Item { ident, span, vis_span, .. })) =
|
||||
hir.get_if_local(*def_id)
|
||||
{
|
||||
let msg = format!(
|
||||
"alternatively, consider making `fn {}` asynchronous",
|
||||
ident
|
||||
);
|
||||
if vis_span.is_empty() {
|
||||
err.span_suggestion_verbose(
|
||||
span.shrink_to_lo(),
|
||||
&msg,
|
||||
"async ",
|
||||
Applicability::MaybeIncorrect,
|
||||
);
|
||||
} else {
|
||||
err.span_suggestion_verbose(
|
||||
vis_span.shrink_to_hi(),
|
||||
&msg,
|
||||
" async",
|
||||
Applicability::MaybeIncorrect,
|
||||
);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
err.span_label(obligation.cause.span, "remove the `.await`");
|
||||
}
|
||||
// FIXME: account for associated `async fn`s.
|
||||
if let hir::Expr { span, kind: hir::ExprKind::Call(base, _), .. } = expr {
|
||||
if let ty::PredicateKind::Clause(ty::Clause::Trait(pred)) =
|
||||
obligation.predicate.kind().skip_binder()
|
||||
{
|
||||
err.span_label(*span, &format!("this call returns `{}`", pred.self_ty()));
|
||||
}
|
||||
if let Some(typeck_results) = &self.typeck_results
|
||||
&& let ty = typeck_results.expr_ty_adjusted(base)
|
||||
&& let ty::FnDef(def_id, _substs) = ty.kind()
|
||||
&& let Some(hir::Node::Item(hir::Item { ident, span, vis_span, .. })) =
|
||||
hir.get_if_local(*def_id)
|
||||
{
|
||||
let msg = format!(
|
||||
"alternatively, consider making `fn {}` asynchronous",
|
||||
ident
|
||||
);
|
||||
if vis_span.is_empty() {
|
||||
err.span_suggestion_verbose(
|
||||
span.shrink_to_lo(),
|
||||
&msg,
|
||||
"async ",
|
||||
Applicability::MaybeIncorrect,
|
||||
);
|
||||
} else {
|
||||
err.span_suggestion_verbose(
|
||||
vis_span.shrink_to_hi(),
|
||||
&msg,
|
||||
" async",
|
||||
Applicability::MaybeIncorrect,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
@ -1455,9 +1455,36 @@ where
|
|||
}
|
||||
}
|
||||
|
||||
/// Specialization trait used for `From<&[T]>`.
|
||||
#[cfg(not(no_global_oom_handling))]
|
||||
trait BoxFromSlice<T> {
|
||||
fn from_slice(slice: &[T]) -> Self;
|
||||
}
|
||||
|
||||
#[cfg(not(no_global_oom_handling))]
|
||||
impl<T: Clone> BoxFromSlice<T> for Box<[T]> {
|
||||
#[inline]
|
||||
default fn from_slice(slice: &[T]) -> Self {
|
||||
slice.to_vec().into_boxed_slice()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(no_global_oom_handling))]
|
||||
impl<T: Copy> BoxFromSlice<T> for Box<[T]> {
|
||||
#[inline]
|
||||
fn from_slice(slice: &[T]) -> Self {
|
||||
let len = slice.len();
|
||||
let buf = RawVec::with_capacity(len);
|
||||
unsafe {
|
||||
ptr::copy_nonoverlapping(slice.as_ptr(), buf.ptr(), len);
|
||||
buf.into_box(slice.len()).assume_init()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(no_global_oom_handling))]
|
||||
#[stable(feature = "box_from_slice", since = "1.17.0")]
|
||||
impl<T: Copy> From<&[T]> for Box<[T]> {
|
||||
impl<T: Clone> From<&[T]> for Box<[T]> {
|
||||
/// Converts a `&[T]` into a `Box<[T]>`
|
||||
///
|
||||
/// This conversion allocates on the heap
|
||||
|
@ -1471,19 +1498,15 @@ impl<T: Copy> From<&[T]> for Box<[T]> {
|
|||
///
|
||||
/// println!("{boxed_slice:?}");
|
||||
/// ```
|
||||
#[inline]
|
||||
fn from(slice: &[T]) -> Box<[T]> {
|
||||
let len = slice.len();
|
||||
let buf = RawVec::with_capacity(len);
|
||||
unsafe {
|
||||
ptr::copy_nonoverlapping(slice.as_ptr(), buf.ptr(), len);
|
||||
buf.into_box(slice.len()).assume_init()
|
||||
}
|
||||
<Self as BoxFromSlice<T>>::from_slice(slice)
|
||||
}
|
||||
}
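(Editorial sketch: assuming this relaxation from `T: Copy` to `T: Clone` lands as written, `Box<[T]>` can now be built from a slice of a non-`Copy` element type such as `String`.)

    fn main() {
        // `String` is `Clone` but not `Copy`; the relaxed bound makes this conversion compile.
        let words = [String::from("hello"), String::from("world")];
        let boxed: Box<[String]> = Box::from(&words[..]);
        assert_eq!(boxed.len(), 2);
    }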
|
||||
|
||||
#[cfg(not(no_global_oom_handling))]
|
||||
#[stable(feature = "box_from_cow", since = "1.45.0")]
|
||||
impl<T: Copy> From<Cow<'_, [T]>> for Box<[T]> {
|
||||
impl<T: Clone> From<Cow<'_, [T]>> for Box<[T]> {
|
||||
/// Converts a `Cow<'_, [T]>` into a `Box<[T]>`
|
||||
///
|
||||
/// When `cow` is the `Cow::Borrowed` variant, this
@ -1321,13 +1321,13 @@ mod impls {
|
|||
(true, true) => Some(Equal),
|
||||
}
|
||||
}
|
||||
#[inline]
|
||||
#[inline(always)]
|
||||
fn lt(&self, other: &$t) -> bool { (*self) < (*other) }
|
||||
#[inline]
|
||||
#[inline(always)]
|
||||
fn le(&self, other: &$t) -> bool { (*self) <= (*other) }
|
||||
#[inline]
|
||||
#[inline(always)]
|
||||
fn ge(&self, other: &$t) -> bool { (*self) >= (*other) }
|
||||
#[inline]
|
||||
#[inline(always)]
|
||||
fn gt(&self, other: &$t) -> bool { (*self) > (*other) }
|
||||
}
|
||||
)*)
|
||||
|
@ -1359,13 +1359,13 @@ mod impls {
|
|||
fn partial_cmp(&self, other: &$t) -> Option<Ordering> {
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
#[inline]
|
||||
#[inline(always)]
|
||||
fn lt(&self, other: &$t) -> bool { (*self) < (*other) }
|
||||
#[inline]
|
||||
#[inline(always)]
|
||||
fn le(&self, other: &$t) -> bool { (*self) <= (*other) }
|
||||
#[inline]
|
||||
#[inline(always)]
|
||||
fn ge(&self, other: &$t) -> bool { (*self) >= (*other) }
|
||||
#[inline]
|
||||
#[inline(always)]
|
||||
fn gt(&self, other: &$t) -> bool { (*self) > (*other) }
|
||||
}
@ -99,6 +99,7 @@ use crate::future::Future;
|
|||
/// }
|
||||
/// ```
|
||||
#[stable(feature = "into_future", since = "1.64.0")]
|
||||
#[rustc_diagnostic_item = "IntoFuture"]
|
||||
pub trait IntoFuture {
|
||||
/// The output that the future will produce on completion.
|
||||
#[stable(feature = "into_future", since = "1.64.0")]
@ -2257,12 +2257,23 @@ extern "rust-intrinsic" {
|
|||
/// This is an implementation detail of [`crate::ptr::read`] and should
|
||||
/// not be used anywhere else. See its comments for why this exists.
|
||||
///
|
||||
/// This intrinsic can *only* be called where the argument is a local without
|
||||
/// projections (`read_via_copy(p)`, not `read_via_copy(*p)`) so that it
|
||||
/// This intrinsic can *only* be called where the pointer is a local without
|
||||
/// projections (`read_via_copy(ptr)`, not `read_via_copy(*ptr)`) so that it
|
||||
/// trivially obeys runtime-MIR rules about derefs in operands.
|
||||
#[rustc_const_unstable(feature = "const_ptr_read", issue = "80377")]
|
||||
#[rustc_nounwind]
|
||||
pub fn read_via_copy<T>(p: *const T) -> T;
|
||||
pub fn read_via_copy<T>(ptr: *const T) -> T;
|
||||
|
||||
/// This is an implementation detail of [`crate::ptr::write`] and should
|
||||
/// not be used anywhere else. See its comments for why this exists.
|
||||
///
|
||||
/// This intrinsic can *only* be called where the pointer is a local without
|
||||
/// projections (`write_via_move(ptr, x)`, not `write_via_move(*ptr, x)`) so
|
||||
/// that it trivially obeys runtime-MIR rules about derefs in operands.
|
||||
#[cfg(not(bootstrap))]
|
||||
#[rustc_const_unstable(feature = "const_ptr_write", issue = "86302")]
|
||||
#[rustc_nounwind]
|
||||
pub fn write_via_move<T>(ptr: *mut T, value: T);
|
||||
|
||||
/// Returns the value of the discriminant for the variant in 'v';
|
||||
/// if `T` has no discriminant, returns `0`.
|
||||
|
@ -2828,3 +2839,16 @@ pub const unsafe fn transmute_unchecked<Src, Dst>(src: Src) -> Dst {
|
|||
// SAFETY: It's a transmute -- the caller promised it's fine.
|
||||
unsafe { transmute_copy(&ManuallyDrop::new(src)) }
|
||||
}
|
||||
|
||||
/// Polyfill for bootstrap
|
||||
#[cfg(bootstrap)]
|
||||
pub const unsafe fn write_via_move<T>(ptr: *mut T, value: T) {
|
||||
use crate::mem::*;
|
||||
// SAFETY: the caller must guarantee that `ptr` is valid for writes.
// `ptr` cannot overlap `value` because the caller has mutable access
// to `ptr` while `value` is owned by this function.
|
||||
unsafe {
|
||||
copy_nonoverlapping::<T>(&value, ptr, 1);
|
||||
forget(value);
|
||||
}
|
||||
}
@ -24,7 +24,7 @@ use crate::hash::Hasher;
|
|||
/// operations. Its cousin [`sync::Arc`][arc] does use atomic operations (incurring
|
||||
/// some overhead) and thus is `Send`.
|
||||
///
|
||||
/// See [the Nomicon](../../nomicon/send-and-sync.html) for more details.
|
||||
/// See [the Nomicon](../../nomicon/send-and-sync.html) and the [`Sync`] trait for more details.
|
||||
///
|
||||
/// [`Rc`]: ../../std/rc/struct.Rc.html
|
||||
/// [arc]: ../../std/sync/struct.Arc.html
|
||||
|
@ -426,6 +426,11 @@ pub macro Copy($item:item) {
|
|||
/// becomes read-only, as if it were a `& &T`. Hence there is no risk
|
||||
/// of a data race.
|
||||
///
|
||||
/// A shorter overview of how [`Sync`] and [`Send`] relate to referencing:
|
||||
/// * `&T` is [`Send`] if and only if `T` is [`Sync`]
|
||||
/// * `&mut T` is [`Send`] if and only if `T` is [`Send`]
|
||||
/// * `&T` and `&mut T` are [`Sync`] if and only if `T` is [`Sync`]
|
||||
///
|
||||
/// Types that are not `Sync` are those that have "interior
|
||||
/// mutability" in a non-thread-safe form, such as [`Cell`][cell]
|
||||
/// and [`RefCell`][refcell]. These types allow for mutation of
@ -1007,7 +1007,7 @@ impl<T> Option<T> {
|
|||
{
|
||||
match self {
|
||||
Some(x) => x,
|
||||
None => Default::default(),
|
||||
None => T::default(),
|
||||
}
|
||||
}
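(Editorial note: the observable behaviour of `unwrap_or_default` is unchanged by this simplification; a small usage example.)

    fn main() {
        let present = Some(7);
        let absent: Option<i32> = None;
        assert_eq!(present.unwrap_or_default(), 7);
        assert_eq!(absent.unwrap_or_default(), 0); // falls back to i32::default()
    }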
|
||||
|
||||
|
@ -1615,11 +1615,7 @@ impl<T> Option<T> {
|
|||
where
|
||||
T: Default,
|
||||
{
|
||||
fn default<T: Default>() -> T {
|
||||
T::default()
|
||||
}
|
||||
|
||||
self.get_or_insert_with(default)
|
||||
self.get_or_insert_with(T::default)
|
||||
}
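(Editorial note: `get_or_insert_with`, called above with `T::default`, inserts and returns a default value only when the option is `None`; a short sketch.)

    fn main() {
        let mut opt: Option<Vec<i32>> = None;
        // `opt` is `None`, so an empty `Vec` is inserted and a mutable reference returned.
        let values = opt.get_or_insert_with(Vec::default);
        values.push(1);
        assert_eq!(opt, Some(vec![1]));
    }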
|
||||
|
||||
/// Inserts a value computed from `f` into the option if it is [`None`],
@ -1349,13 +1349,13 @@ pub const unsafe fn read_unaligned<T>(src: *const T) -> T {
#[rustc_const_unstable(feature = "const_ptr_write", issue = "86302")]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub const unsafe fn write<T>(dst: *mut T, src: T) {
// We are calling the intrinsics directly to avoid function calls in the generated code
// as `intrinsics::copy_nonoverlapping` is a wrapper function.
extern "rust-intrinsic" {
#[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.63.0")]
#[rustc_nounwind]
fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: usize);
}
// Semantically, it would be fine for this to be implemented as a
// `copy_nonoverlapping` and appropriate drop suppression of `src`.

// However, implementing via that currently produces more MIR than is ideal.
// Using an intrinsic keeps it down to just the simple `*dst = move src` in
// MIR (11 statements shorter, at the time of writing), and also allows
// `src` to stay an SSA value in codegen_ssa, rather than a memory one.

// SAFETY: the caller must guarantee that `dst` is valid for writes.
// `dst` cannot overlap `src` because the caller has mutable access

@ -1365,8 +1365,7 @@ pub const unsafe fn write<T>(dst: *mut T, src: T) {
"ptr::write requires that the pointer argument is aligned and non-null",
[T](dst: *mut T) => is_aligned_and_not_null(dst)
);
copy_nonoverlapping(&src as *const T, dst, 1);
intrinsics::forget(src);
intrinsics::write_via_move(dst, src)
}
}

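For context on the hunk above: `ptr::write` moves `src` into `*dst` without reading or dropping whatever was previously at `dst`; the new intrinsic only changes how that is lowered to MIR. A minimal usage sketch (illustrative only, not from the diff):

    // Sketch: replacing a value in place without a double drop.
    use std::ptr;

    fn main() {
        let mut s = String::from("old");
        let replacement = String::from("new");
        unsafe {
            // Drop the old value first, then write the new one in place.
            ptr::drop_in_place(&mut s);
            ptr::write(&mut s, replacement);
        }
        assert_eq!(s, "new");
    }
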
@ -60,10 +60,17 @@ impl<'a, T> IntoIterator for &'a mut [T] {
#[stable(feature = "rust1", since = "1.0.0")]
#[must_use = "iterators are lazy and do nothing unless consumed"]
pub struct Iter<'a, T: 'a> {
/// The pointer to the next element to return, or the past-the-end location
/// if the iterator is empty.
///
/// This address will be used for all ZST elements, never changed.
ptr: NonNull<T>,
end: *const T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
// ptr == end is a quick test for the Iterator being empty, that works
// for both ZST and non-ZST.
/// For non-ZSTs, the non-null pointer to the past-the-end element.
///
/// For ZSTs, this is `ptr.wrapping_byte_add(len)`.
///
/// For all types, `ptr == end` tests whether the iterator is empty.
end: *const T,
_marker: PhantomData<&'a T>,
}

@ -179,10 +186,17 @@ impl<T> AsRef<[T]> for Iter<'_, T> {
#[stable(feature = "rust1", since = "1.0.0")]
#[must_use = "iterators are lazy and do nothing unless consumed"]
pub struct IterMut<'a, T: 'a> {
/// The pointer to the next element to return, or the past-the-end location
/// if the iterator is empty.
///
/// This address will be used for all ZST elements, never changed.
ptr: NonNull<T>,
end: *mut T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
// ptr == end is a quick test for the Iterator being empty, that works
// for both ZST and non-ZST.
/// For non-ZSTs, the non-null pointer to the past-the-end element.
///
/// For ZSTs, this is `ptr.wrapping_byte_add(len)`.
///
/// For all types, `ptr == end` tests whether the iterator is empty.
end: *mut T,
_marker: PhantomData<&'a mut T>,
}

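A sketch of the `end` encoding documented above (illustrative only, not std's implementation): because `end` is `ptr` plus the element count for ZSTs, the remaining length can be recovered the same way for ZST and non-ZST element types, and `ptr == end` means "empty" in both cases.

    // Sketch: recovering the remaining length from the (ptr, end) pair.
    fn remaining<T>(ptr: *const T, end: *const T) -> usize {
        let size = std::mem::size_of::<T>();
        if size == 0 {
            // For ZSTs the difference in addresses *is* the count.
            (end as usize).wrapping_sub(ptr as usize)
        } else {
            (end as usize - ptr as usize) / size
        }
    }

    fn main() {
        let xs = [1u32, 2, 3, 4];
        let ptr = xs.as_ptr();
        let end = unsafe { ptr.add(xs.len()) };
        assert_eq!(remaining(ptr, end), 4);

        // For a ZST, `end` encodes the count directly in the address offset.
        let zst_ptr: *const () = std::ptr::NonNull::<()>::dangling().as_ptr();
        let zst_end = (zst_ptr as usize).wrapping_add(3) as *const ();
        assert_eq!(remaining(zst_ptr, zst_end), 3);
    }
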
@ -47,7 +47,7 @@ static EXCEPTION_TYPE_INFO: TypeInfo = TypeInfo {
name: b"rust_panic\0".as_ptr(),
};

// NOTE(nbdd0121): The `canary` field will be part of stable ABI after `c_unwind` stabilization.
// NOTE(nbdd0121): The `canary` field is part of stable ABI.
#[repr(C)]
struct Exception {
// See `gcc.rs` on why this is present. We already have a static here so just use it.

@ -48,8 +48,8 @@ use unwind as uw;
static CANARY: u8 = 0;

// NOTE(nbdd0121)
// Once `c_unwind` feature is stabilized, there will be ABI stability requirement
// on this struct. The first two field must be `_Unwind_Exception` and `canary`,
// There is an ABI stability requirement on this struct.
// The first two field must be `_Unwind_Exception` and `canary`,
// as it may be accessed by a different version of the std with a different compiler.
#[repr(C)]
struct Exception {

@ -52,7 +52,7 @@ use core::mem::{self, ManuallyDrop};
use core::ptr;
use libc::{c_int, c_uint, c_void};

// NOTE(nbdd0121): The `canary` field will be part of stable ABI after `c_unwind` stabilization.
// NOTE(nbdd0121): The `canary` field is part of stable ABI.
#[repr(C)]
struct Exception {
// See `gcc.rs` on why this is present. We already have a static here so just use it.

@ -709,6 +709,7 @@ impl File {
// `AsRawHandle`/`IntoRawHandle`/`FromRawHandle` on Windows.

impl AsInner<fs_imp::File> for File {
#[inline]
fn as_inner(&self) -> &fs_imp::File {
&self.inner
}

@ -1087,12 +1088,14 @@ impl OpenOptions {
}

impl AsInner<fs_imp::OpenOptions> for OpenOptions {
#[inline]
fn as_inner(&self) -> &fs_imp::OpenOptions {
&self.0
}
}

impl AsInnerMut<fs_imp::OpenOptions> for OpenOptions {
#[inline]
fn as_inner_mut(&mut self) -> &mut fs_imp::OpenOptions {
&mut self.0
}

@ -1352,6 +1355,7 @@ impl fmt::Debug for Metadata {
}

impl AsInner<fs_imp::FileAttr> for Metadata {
#[inline]
fn as_inner(&self) -> &fs_imp::FileAttr {
&self.0
}

@ -1604,6 +1608,7 @@ impl FileType {
}

impl AsInner<fs_imp::FileType> for FileType {
#[inline]
fn as_inner(&self) -> &fs_imp::FileType {
&self.0
}

@ -1616,6 +1621,7 @@ impl FromInner<fs_imp::FilePermissions> for Permissions {
}

impl AsInner<fs_imp::FilePermissions> for Permissions {
#[inline]
fn as_inner(&self) -> &fs_imp::FilePermissions {
&self.0
}

@ -1770,6 +1776,7 @@ impl fmt::Debug for DirEntry {
}

impl AsInner<fs_imp::DirEntry> for DirEntry {
#[inline]
fn as_inner(&self) -> &fs_imp::DirEntry {
&self.0
}

@ -2510,6 +2517,7 @@ impl DirBuilder {
}

impl AsInnerMut<fs_imp::DirBuilder> for DirBuilder {
#[inline]
fn as_inner_mut(&mut self) -> &mut fs_imp::DirBuilder {
&mut self.inner
}

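The hunks in this file, and in the platform-specific files that follow, all apply the same pattern: add `#[inline]` to a trivial `AsInner`/`AsRawFd`-style accessor so it can be inlined across crate boundaries. A self-contained sketch of the pattern (illustrative only, with made-up types, not std's real definitions):

    // Sketch: the AsInner accessor pattern that the hunks annotate with #[inline].
    trait AsInner<Inner: ?Sized> {
        fn as_inner(&self) -> &Inner;
    }

    struct Handle(u64);
    struct File {
        inner: Handle,
    }

    impl AsInner<Handle> for File {
        // The accessor is trivial; #[inline] lets callers in other crates
        // see through it instead of paying for a function call.
        #[inline]
        fn as_inner(&self) -> &Handle {
            &self.inner
        }
    }

    fn main() {
        let f = File { inner: Handle(3) };
        assert_eq!(f.as_inner().0, 3);
    }
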
@ -691,6 +691,7 @@ impl Write for &TcpStream {
}

impl AsInner<net_imp::TcpStream> for TcpStream {
#[inline]
fn as_inner(&self) -> &net_imp::TcpStream {
&self.0
}

@ -1033,6 +1034,7 @@ impl Iterator for IntoIncoming {
impl FusedIterator for IntoIncoming {}

impl AsInner<net_imp::TcpListener> for TcpListener {
#[inline]
fn as_inner(&self) -> &net_imp::TcpListener {
&self.0
}

@ -788,6 +788,7 @@ impl UdpSocket {
// `AsRawSocket`/`IntoRawSocket`/`FromRawSocket` on Windows.

impl AsInner<net_imp::UdpSocket> for UdpSocket {
#[inline]
fn as_inner(&self) -> &net_imp::UdpSocket {
&self.0
}

@ -52,6 +52,7 @@ pub struct PidFd {
}

impl AsInner<FileDesc> for PidFd {
#[inline]
fn as_inner(&self) -> &FileDesc {
&self.inner
}

@ -70,6 +71,7 @@ impl IntoInner<FileDesc> for PidFd {
}

impl AsRawFd for PidFd {
#[inline]
fn as_raw_fd(&self) -> RawFd {
self.as_inner().as_raw_fd()
}

@ -1395,11 +1395,16 @@ impl PathBuf {
///
/// let mut buf = PathBuf::from("/");
/// assert!(buf.file_name() == None);
/// buf.set_file_name("bar");
/// assert!(buf == PathBuf::from("/bar"));
///
/// buf.set_file_name("foo.txt");
/// assert!(buf == PathBuf::from("/foo.txt"));
/// assert!(buf.file_name().is_some());
/// buf.set_file_name("baz.txt");
/// assert!(buf == PathBuf::from("/baz.txt"));
///
/// buf.set_file_name("bar.txt");
/// assert!(buf == PathBuf::from("/bar.txt"));
///
/// buf.set_file_name("baz");
/// assert!(buf == PathBuf::from("/baz"));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn set_file_name<S: AsRef<OsStr>>(&mut self, file_name: S) {

@ -2562,7 +2567,8 @@ impl Path {
/// ```
/// use std::path::{Path, PathBuf};
///
/// let path = Path::new("/tmp/foo.txt");
/// let path = Path::new("/tmp/foo.png");
/// assert_eq!(path.with_file_name("bar"), PathBuf::from("/tmp/bar"));
/// assert_eq!(path.with_file_name("bar.txt"), PathBuf::from("/tmp/bar.txt"));
///
/// let path = Path::new("/tmp");

@ -211,6 +211,7 @@ pub struct Child {
impl crate::sealed::Sealed for Child {}

impl AsInner<imp::Process> for Child {
#[inline]
fn as_inner(&self) -> &imp::Process {
&self.handle
}

@ -304,6 +305,7 @@ impl Write for &ChildStdin {
}

impl AsInner<AnonPipe> for ChildStdin {
#[inline]
fn as_inner(&self) -> &AnonPipe {
&self.inner
}

@ -373,6 +375,7 @@ impl Read for ChildStdout {
}

impl AsInner<AnonPipe> for ChildStdout {
#[inline]
fn as_inner(&self) -> &AnonPipe {
&self.inner
}

@ -438,6 +441,7 @@ impl Read for ChildStderr {
}

impl AsInner<AnonPipe> for ChildStderr {
#[inline]
fn as_inner(&self) -> &AnonPipe {
&self.inner
}

@ -1107,12 +1111,14 @@ impl fmt::Debug for Command {
}

impl AsInner<imp::Command> for Command {
#[inline]
fn as_inner(&self) -> &imp::Command {
&self.inner
}
}

impl AsInnerMut<imp::Command> for Command {
#[inline]
fn as_inner_mut(&mut self) -> &mut imp::Command {
&mut self.inner
}

@ -1605,6 +1611,7 @@ impl ExitStatus {
}

impl AsInner<imp::ExitStatus> for ExitStatus {
#[inline]
fn as_inner(&self) -> &imp::ExitStatus {
&self.0
}

@ -1884,6 +1891,7 @@ impl From<u8> for ExitCode {
}

impl AsInner<imp::ExitCode> for ExitCode {
#[inline]
fn as_inner(&self) -> &imp::ExitCode {
&self.0
}

@ -75,6 +75,7 @@ impl FromRawFd for FileDesc {
}

impl AsInner<OwnedFd> for FileDesc {
#[inline]
fn as_inner(&self) -> &OwnedFd {
&self.fd
}

@ -367,12 +367,14 @@ impl DirBuilder {
}

impl AsInner<FileDesc> for File {
#[inline]
fn as_inner(&self) -> &FileDesc {
&self.0
}
}

impl AsInnerMut<FileDesc> for File {
#[inline]
fn as_inner_mut(&mut self) -> &mut FileDesc {
&mut self.0
}

@ -397,6 +399,7 @@ impl AsFd for File {
}

impl AsRawFd for File {
#[inline]
fn as_raw_fd(&self) -> RawFd {
self.0.as_raw_fd()
}

@ -340,6 +340,7 @@ impl Socket {
}

impl AsInner<FileDesc> for Socket {
#[inline]
fn as_inner(&self) -> &FileDesc {
&self.0
}

@ -364,6 +365,7 @@ impl AsFd for Socket {
}

impl AsRawFd for Socket {
#[inline]
fn as_raw_fd(&self) -> RawFd {
self.0.as_raw_fd()
}

@ -62,6 +62,7 @@ impl FileDesc {
}

impl AsInner<Fd> for FileDesc {
#[inline]
fn as_inner(&self) -> &Fd {
&self.fd
}

@ -24,6 +24,7 @@ impl Socket {
}

impl AsInner<FileDesc> for Socket {
#[inline]
fn as_inner(&self) -> &FileDesc {
&self.inner
}

@ -220,6 +221,7 @@ impl TcpStream {
}

impl AsInner<Socket> for TcpStream {
#[inline]
fn as_inner(&self) -> &Socket {
&self.inner
}

@ -304,6 +306,7 @@ impl TcpListener {
}

impl AsInner<Socket> for TcpListener {
#[inline]
fn as_inner(&self) -> &Socket {
&self.inner
}

@ -112,6 +112,7 @@ impl FileDesc {
}

impl AsInner<c_int> for FileDesc {
#[inline]
fn as_inner(&self) -> &c_int {
&self.fd
}

@ -462,6 +463,7 @@ impl Socket {
}

impl AsInner<c_int> for Socket {
#[inline]
fn as_inner(&self) -> &c_int {
self.0.as_inner()
}

@ -481,6 +481,7 @@ impl<'a> Read for &'a FileDesc {
}

impl AsInner<OwnedFd> for FileDesc {
#[inline]
fn as_inner(&self) -> &OwnedFd {
&self.0
}

@ -505,6 +506,7 @@ impl AsFd for FileDesc {
}

impl AsRawFd for FileDesc {
#[inline]
fn as_raw_fd(&self) -> RawFd {
self.0.as_raw_fd()
}

@ -547,6 +547,7 @@ impl FileAttr {
}

impl AsInner<stat64> for FileAttr {
#[inline]
fn as_inner(&self) -> &stat64 {
&self.stat
}

@ -1193,8 +1194,6 @@ impl File {
None => Ok(libc::timespec { tv_sec: 0, tv_nsec: libc::UTIME_OMIT as _ }),
}
};
#[cfg(not(any(target_os = "redox", target_os = "espidf", target_os = "horizon")))]
let times = [to_timespec(times.accessed)?, to_timespec(times.modified)?];
cfg_if::cfg_if! {
if #[cfg(any(target_os = "redox", target_os = "espidf", target_os = "horizon"))] {
// Redox doesn't appear to support `UTIME_OMIT`.

@ -1206,6 +1205,7 @@ impl File {
"setting file times not supported",
))
} else if #[cfg(any(target_os = "android", target_os = "macos"))] {
let times = [to_timespec(times.accessed)?, to_timespec(times.modified)?];
// futimens requires macOS 10.13, and Android API level 19
cvt(unsafe {
weak!(fn futimens(c_int, *const libc::timespec) -> c_int);

@ -1232,6 +1232,22 @@ impl File {
})?;
Ok(())
} else {
#[cfg(all(target_os = "linux", target_env = "gnu", target_pointer_width = "32", not(target_arch = "riscv32")))]
{
use crate::sys::{time::__timespec64, weak::weak};

// Added in glibc 2.34
weak!(fn __futimens64(libc::c_int, *const __timespec64) -> libc::c_int);

if let Some(futimens64) = __futimens64.get() {
let to_timespec = |time: Option<SystemTime>| time.map(|time| time.t.to_timespec64())
.unwrap_or(__timespec64::new(0, libc::UTIME_OMIT as _));
let times = [to_timespec(times.accessed), to_timespec(times.modified)];
cvt(unsafe { futimens64(self.as_raw_fd(), times.as_ptr()) })?;
return Ok(());
}
}
let times = [to_timespec(times.accessed)?, to_timespec(times.modified)?];
cvt(unsafe { libc::futimens(self.as_raw_fd(), times.as_ptr()) })?;
Ok(())
}

@ -1254,12 +1270,14 @@ impl DirBuilder {
}

impl AsInner<FileDesc> for File {
#[inline]
fn as_inner(&self) -> &FileDesc {
&self.0
}
}

impl AsInnerMut<FileDesc> for File {
#[inline]
fn as_inner_mut(&mut self) -> &mut FileDesc {
&mut self.0
}

@ -1284,6 +1302,7 @@ impl AsFd for File {
}

impl AsRawFd for File {
#[inline]
fn as_raw_fd(&self) -> RawFd {
self.0.as_raw_fd()
}

@ -129,6 +129,7 @@ pub mod net {
}

impl AsInner<FileDesc> for Socket {
#[inline]
fn as_inner(&self) -> &FileDesc {
&self.0
}

@ -153,6 +154,7 @@ pub mod net {
}

impl AsRawFd for Socket {
#[inline]
fn as_raw_fd(&self) -> RawFd {
self.0.as_raw_fd()
}

@ -183,6 +185,7 @@ pub mod net {
unimpl!();
}

#[inline]
pub fn socket(&self) -> &Socket {
&self.inner
}

@ -305,6 +308,7 @@ pub mod net {
unimpl!();
}

#[inline]
pub fn socket(&self) -> &Socket {
&self.inner
}

@ -371,6 +375,7 @@ pub mod net {
unimpl!();
}

#[inline]
pub fn socket(&self) -> &Socket {
&self.inner
}

@ -490,6 +490,7 @@ impl Socket {
}

impl AsInner<FileDesc> for Socket {
#[inline]
fn as_inner(&self) -> &FileDesc {
&self.0
}

@ -514,6 +515,7 @@ impl AsFd for Socket {
}

impl AsRawFd for Socket {
#[inline]
fn as_raw_fd(&self) -> RawFd {
self.0.as_raw_fd()
}

@ -89,6 +89,7 @@ impl IntoInner<Vec<u8>> for Buf {
}

impl AsInner<[u8]> for Buf {
#[inline]
fn as_inner(&self) -> &[u8] {
&self.inner
}

@ -135,6 +135,7 @@ pub fn read2(p1: AnonPipe, v1: &mut Vec<u8>, p2: AnonPipe, v2: &mut Vec<u8>) ->
}

impl AsRawFd for AnonPipe {
#[inline]
fn as_raw_fd(&self) -> RawFd {
self.0.as_raw_fd()
}

@ -166,6 +166,16 @@ impl Timespec {
}
self.to_timespec()
}

#[cfg(all(
target_os = "linux",
target_env = "gnu",
target_pointer_width = "32",
not(target_arch = "riscv32")
))]
pub fn to_timespec64(&self) -> __timespec64 {
__timespec64::new(self.tv_sec, self.tv_nsec.0 as _)
}
}

impl From<libc::timespec> for Timespec {

@ -190,6 +200,18 @@ pub(in crate::sys::unix) struct __timespec64 {
_padding: i32,
}

#[cfg(all(
target_os = "linux",
target_env = "gnu",
target_pointer_width = "32",
not(target_arch = "riscv32")
))]
impl __timespec64 {
pub(in crate::sys::unix) fn new(tv_sec: i64, tv_nsec: i32) -> Self {
Self { tv_sec, tv_nsec, _padding: 0 }
}
}

#[cfg(all(
target_os = "linux",
target_env = "gnu",

@ -275,12 +275,14 @@ impl WasiFd {
}

impl AsInner<OwnedFd> for WasiFd {
#[inline]
fn as_inner(&self) -> &OwnedFd {
&self.fd
}
}

impl AsInnerMut<OwnedFd> for WasiFd {
#[inline]
fn as_inner_mut(&mut self) -> &mut OwnedFd {
&mut self.fd
}

@ -305,6 +307,7 @@ impl AsFd for WasiFd {
}

impl AsRawFd for WasiFd {
#[inline]
fn as_raw_fd(&self) -> RawFd {
self.fd.as_raw_fd()
}

@ -498,6 +498,7 @@ impl File {
}

impl AsInner<WasiFd> for File {
#[inline]
fn as_inner(&self) -> &WasiFd {
&self.fd
}

@ -522,6 +523,7 @@ impl AsFd for File {
}

impl AsRawFd for File {
#[inline]
fn as_raw_fd(&self) -> RawFd {
self.fd.as_raw_fd()
}

@ -17,6 +17,7 @@ pub struct TcpStream {
}

impl AsInner<WasiFd> for Socket {
#[inline]
fn as_inner(&self) -> &WasiFd {
&self.0
}

@ -41,6 +42,7 @@ impl AsFd for Socket {
}

impl AsRawFd for Socket {
#[inline]
fn as_raw_fd(&self) -> RawFd {
self.0.as_raw_fd()
}

@ -184,6 +186,7 @@ impl TcpStream {
}
}

#[inline]
pub fn socket(&self) -> &Socket {
&self.inner
}

@ -274,6 +277,7 @@ impl TcpListener {
}
}

#[inline]
pub fn socket(&self) -> &Socket {
&self.inner
}

@ -284,6 +288,7 @@ impl TcpListener {
}

impl AsInner<Socket> for TcpListener {
#[inline]
fn as_inner(&self) -> &Socket {
&self.inner
}

@ -436,6 +441,7 @@ impl UdpSocket {
unsupported()
}

#[inline]
pub fn socket(&self) -> &Socket {
&self.inner
}

@ -446,6 +452,7 @@ impl UdpSocket {
}

impl AsInner<Socket> for UdpSocket {
#[inline]
fn as_inner(&self) -> &Socket {
&self.inner
}

@ -832,6 +832,7 @@ fn open_link_no_reparse(parent: &File, name: &[u16], access: u32) -> io::Result<
}

impl AsInner<Handle> for File {
#[inline]
fn as_inner(&self) -> &Handle {
&self.handle
}

@ -34,6 +34,7 @@ impl Handle {
}

impl AsInner<OwnedHandle> for Handle {
#[inline]
fn as_inner(&self) -> &OwnedHandle {
&self.0
}

@ -446,6 +446,7 @@ impl<'a> Read for &'a Socket {
}

impl AsInner<OwnedSocket> for Socket {
#[inline]
fn as_inner(&self) -> &OwnedSocket {
&self.0
}

@ -27,6 +27,7 @@ impl FromInner<Wtf8Buf> for Buf {
}

impl AsInner<Wtf8> for Buf {
#[inline]
fn as_inner(&self) -> &Wtf8 {
&self.inner
}

@ -239,6 +239,7 @@ impl TcpStream {
Ok(TcpStream { inner: sock })
}

#[inline]
pub fn socket(&self) -> &Socket {
&self.inner
}

@ -352,6 +353,7 @@ impl TcpStream {
}

impl AsInner<Socket> for TcpStream {
#[inline]
fn as_inner(&self) -> &Socket {
&self.inner
}

@ -427,6 +429,7 @@ impl TcpListener {
Ok(TcpListener { inner: sock })
}

#[inline]
pub fn socket(&self) -> &Socket {
&self.inner
}

@ -517,6 +520,7 @@ impl UdpSocket {
Ok(UdpSocket { inner: sock })
}

#[inline]
pub fn socket(&self) -> &Socket {
&self.inner
}

@ -501,6 +501,7 @@ pub struct Wtf8 {
}

impl AsInner<[u8]> for Wtf8 {
#[inline]
fn as_inner(&self) -> &[u8] {
&self.bytes
}

@ -119,7 +119,7 @@ pub use core::time::TryFromFloatSecsError;
/// [QueryPerformanceCounter]: https://docs.microsoft.com/en-us/windows/win32/api/profileapi/nf-profileapi-queryperformancecounter
/// [`insecure_time` usercall]: https://edp.fortanix.com/docs/api/fortanix_sgx_abi/struct.Usercalls.html#method.insecure_time
/// [timekeeping in SGX]: https://edp.fortanix.com/docs/concepts/rust-std/#codestdtimecode
/// [__wasi_clock_time_get (Monotonic Clock)]: https://github.com/WebAssembly/WASI/blob/master/phases/snapshot/docs.md#clock_time_get
/// [__wasi_clock_time_get (Monotonic Clock)]: https://github.com/WebAssembly/WASI/blob/main/legacy/preview1/docs.md#clock_time_get
/// [clock_gettime (Monotonic Clock)]: https://linux.die.net/man/3/clock_gettime
/// [mach_absolute_time]: https://developer.apple.com/library/archive/documentation/Darwin/Conceptual/KernelProgramming/services/services.html
///

@ -224,7 +224,7 @@ pub struct Instant(time::Instant);
/// [timekeeping in SGX]: https://edp.fortanix.com/docs/concepts/rust-std/#codestdtimecode
/// [gettimeofday]: https://man7.org/linux/man-pages/man2/gettimeofday.2.html
/// [clock_gettime (Realtime Clock)]: https://linux.die.net/man/3/clock_gettime
/// [__wasi_clock_time_get (Realtime Clock)]: https://github.com/WebAssembly/WASI/blob/master/phases/snapshot/docs.md#clock_time_get
/// [__wasi_clock_time_get (Realtime Clock)]: https://github.com/WebAssembly/WASI/blob/main/legacy/preview1/docs.md#clock_time_get
/// [GetSystemTimePreciseAsFileTime]: https://docs.microsoft.com/en-us/windows/win32/api/sysinfoapi/nf-sysinfoapi-getsystemtimepreciseasfiletime
/// [GetSystemTimeAsFileTime]: https://docs.microsoft.com/en-us/windows/win32/api/sysinfoapi/nf-sysinfoapi-getsystemtimeasfiletime
///

@ -13,6 +13,7 @@ import tarfile
import tempfile

from time import time
from multiprocessing import Pool, cpu_count

try:
import lzma

@ -27,6 +28,20 @@ if platform_is_win32():
else:
EXE_SUFFIX = ""

def get_cpus():
if hasattr(os, "sched_getaffinity"):
return len(os.sched_getaffinity(0))
if hasattr(os, "cpu_count"):
cpus = os.cpu_count()
if cpus is not None:
return cpus
try:
return cpu_count()
except NotImplementedError:
return 1


def get(base, url, path, checksums, verbose=False):
with tempfile.NamedTemporaryFile(delete=False) as temp_file:
temp_path = temp_file.name

@ -42,23 +57,23 @@ def get(base, url, path, checksums, verbose=False):
if os.path.exists(path):
if verify(path, sha256, False):
if verbose:
print("using already-download file", path)
print("using already-download file", path, file=sys.stderr)
return
else:
if verbose:
print("ignoring already-download file",
path, "due to failed verification")
path, "due to failed verification", file=sys.stderr)
os.unlink(path)
download(temp_path, "{}/{}".format(base, url), True, verbose)
if not verify(temp_path, sha256, verbose):
raise RuntimeError("failed verification")
if verbose:
print("moving {} to {}".format(temp_path, path))
print("moving {} to {}".format(temp_path, path), file=sys.stderr)
shutil.move(temp_path, path)
finally:
if os.path.isfile(temp_path):
if verbose:
print("removing", temp_path)
print("removing", temp_path, file=sys.stderr)
os.unlink(temp_path)


@ -68,7 +83,7 @@ def download(path, url, probably_big, verbose):
_download(path, url, probably_big, verbose, True)
return
except RuntimeError:
print("\nspurious failure, trying again")
print("\nspurious failure, trying again", file=sys.stderr)
_download(path, url, probably_big, verbose, False)


@ -79,7 +94,7 @@ def _download(path, url, probably_big, verbose, exception):
# - If we are on win32 fallback to powershell
# - Otherwise raise the error if appropriate
if probably_big or verbose:
print("downloading {}".format(url))
print("downloading {}".format(url), file=sys.stderr)

try:
if probably_big or verbose:

@ -115,20 +130,20 @@ def _download(path, url, probably_big, verbose, exception):
def verify(path, expected, verbose):
"""Check if the sha256 sum of the given path is valid"""
if verbose:
print("verifying", path)
print("verifying", path, file=sys.stderr)
with open(path, "rb") as source:
found = hashlib.sha256(source.read()).hexdigest()
verified = found == expected
if not verified:
print("invalid checksum:\n"
" found: {}\n"
" expected: {}".format(found, expected))
" expected: {}".format(found, expected), file=sys.stderr)
return verified


def unpack(tarball, tarball_suffix, dst, verbose=False, match=None):
"""Unpack the given tarball file"""
print("extracting", tarball)
print("extracting", tarball, file=sys.stderr)
fname = os.path.basename(tarball).replace(tarball_suffix, "")
with contextlib.closing(tarfile.open(tarball)) as tar:
for member in tar.getnames():

@ -141,7 +156,7 @@ def unpack(tarball, tarball_suffix, dst, verbose=False, match=None):

dst_path = os.path.join(dst, name)
if verbose:
print(" extracting", member)
print(" extracting", member, file=sys.stderr)
tar.extract(member, dst)
src_path = os.path.join(dst, member)
if os.path.isdir(src_path) and os.path.exists(dst_path):

@ -153,7 +168,7 @@ def unpack(tarball, tarball_suffix, dst, verbose=False, match=None):
def run(args, verbose=False, exception=False, is_bootstrap=False, **kwargs):
"""Run a child program in a new process"""
if verbose:
print("running: " + ' '.join(args))
print("running: " + ' '.join(args), file=sys.stderr)
sys.stdout.flush()
# Ensure that the .exe is used on Windows just in case a Linux ELF has been
# compiled in the same directory.

@ -193,8 +208,8 @@ def require(cmd, exit=True, exception=False):
if exception:
raise
elif exit:
print("error: unable to run `{}`: {}".format(' '.join(cmd), exc))
print("Please make sure it's installed and in the path.")
print("error: unable to run `{}`: {}".format(' '.join(cmd), exc), file=sys.stderr)
print("Please make sure it's installed and in the path.", file=sys.stderr)
sys.exit(1)
return None

@ -218,8 +233,8 @@ def default_build_triple(verbose):

if sys.platform == 'darwin':
if verbose:
print("not using rustc detection as it is unreliable on macOS")
print("falling back to auto-detect")
print("not using rustc detection as it is unreliable on macOS", file=sys.stderr)
print("falling back to auto-detect", file=sys.stderr)
else:
try:
version = subprocess.check_output(["rustc", "--version", "--verbose"],

@ -228,12 +243,14 @@ def default_build_triple(verbose):
host = next(x for x in version.split('\n') if x.startswith("host: "))
triple = host.split("host: ")[1]
if verbose:
print("detected default triple {} from pre-installed rustc".format(triple))
print("detected default triple {} from pre-installed rustc".format(triple),
file=sys.stderr)
return triple
except Exception as e:
if verbose:
print("pre-installed rustc not detected: {}".format(e))
print("falling back to auto-detect")
print("pre-installed rustc not detected: {}".format(e),
file=sys.stderr)
print("falling back to auto-detect", file=sys.stderr)

required = not platform_is_win32()
ostype = require(["uname", "-s"], exit=required)

@ -404,6 +421,48 @@ class Stage0Toolchain:
return self.version + "-" + self.date


class DownloadInfo:
"""A helper class that can be pickled into a parallel subprocess"""

def __init__(
self,
base_download_url,
download_path,
bin_root,
tarball_path,
tarball_suffix,
checksums_sha256,
pattern,
verbose,
):
self.base_download_url = base_download_url
self.download_path = download_path
self.bin_root = bin_root
self.tarball_path = tarball_path
self.tarball_suffix = tarball_suffix
self.checksums_sha256 = checksums_sha256
self.pattern = pattern
self.verbose = verbose

def download_component(download_info):
if not os.path.exists(download_info.tarball_path):
get(
download_info.base_download_url,
download_info.download_path,
download_info.tarball_path,
download_info.checksums_sha256,
verbose=download_info.verbose,
)

def unpack_component(download_info):
unpack(
download_info.tarball_path,
download_info.tarball_suffix,
download_info.bin_root,
match=download_info.pattern,
verbose=download_info.verbose,
)

class RustBuild(object):
"""Provide all the methods required to build Rust"""
def __init__(self):

@ -458,17 +517,53 @@ class RustBuild(object):
)
run_powershell([script])
shutil.rmtree(bin_root)

key = self.stage0_compiler.date
cache_dst = os.path.join(self.build_dir, "cache")
rustc_cache = os.path.join(cache_dst, key)
if not os.path.exists(rustc_cache):
os.makedirs(rustc_cache)

tarball_suffix = '.tar.gz' if lzma is None else '.tar.xz'
filename = "rust-std-{}-{}{}".format(
rustc_channel, self.build, tarball_suffix)
pattern = "rust-std-{}".format(self.build)
self._download_component_helper(filename, pattern, tarball_suffix)
filename = "rustc-{}-{}{}".format(rustc_channel, self.build,
tarball_suffix)
self._download_component_helper(filename, "rustc", tarball_suffix)
filename = "cargo-{}-{}{}".format(rustc_channel, self.build,
tarball_suffix)
self._download_component_helper(filename, "cargo", tarball_suffix)

toolchain_suffix = "{}-{}{}".format(rustc_channel, self.build, tarball_suffix)

tarballs_to_download = [
("rust-std-{}".format(toolchain_suffix), "rust-std-{}".format(self.build)),
("rustc-{}".format(toolchain_suffix), "rustc"),
("cargo-{}".format(toolchain_suffix), "cargo"),
]

tarballs_download_info = [
DownloadInfo(
base_download_url=self.download_url,
download_path="dist/{}/{}".format(self.stage0_compiler.date, filename),
bin_root=self.bin_root(),
tarball_path=os.path.join(rustc_cache, filename),
tarball_suffix=tarball_suffix,
checksums_sha256=self.checksums_sha256,
pattern=pattern,
verbose=self.verbose,
)
for filename, pattern in tarballs_to_download
]

# Download the components serially to show the progress bars properly.
for download_info in tarballs_download_info:
download_component(download_info)

# Unpack the tarballs in parallle.
# In Python 2.7, Pool cannot be used as a context manager.
pool_size = min(len(tarballs_download_info), get_cpus())
if self.verbose:
print('Choosing a pool size of', pool_size, 'for the unpacking of the tarballs')
p = Pool(pool_size)
try:
p.map(unpack_component, tarballs_download_info)
finally:
p.close()
p.join()

if self.should_fix_bins_and_dylibs():
self.fix_bin_or_dylib("{}/bin/cargo".format(bin_root))

@ -484,13 +579,9 @@ class RustBuild(object):
rust_stamp.write(key)

def _download_component_helper(
self, filename, pattern, tarball_suffix,
self, filename, pattern, tarball_suffix, rustc_cache,
):
key = self.stage0_compiler.date
cache_dst = os.path.join(self.build_dir, "cache")
rustc_cache = os.path.join(cache_dst, key)
if not os.path.exists(rustc_cache):
os.makedirs(rustc_cache)

tarball = os.path.join(rustc_cache, filename)
if not os.path.exists(tarball):

@ -545,7 +636,7 @@ class RustBuild(object):

answer = self._should_fix_bins_and_dylibs = get_answer()
if answer:
print("info: You seem to be using Nix.")
print("info: You seem to be using Nix.", file=sys.stderr)
return answer

def fix_bin_or_dylib(self, fname):

@ -558,7 +649,7 @@ class RustBuild(object):
Please see https://nixos.org/patchelf.html for more information
"""
assert self._should_fix_bins_and_dylibs is True
print("attempting to patch", fname)
print("attempting to patch", fname, file=sys.stderr)

# Only build `.nix-deps` once.
nix_deps_dir = self.nix_deps_dir

@ -591,7 +682,7 @@ class RustBuild(object):
"nix-build", "-E", nix_expr, "-o", nix_deps_dir,
])
except subprocess.CalledProcessError as reason:
print("warning: failed to call nix-build:", reason)
print("warning: failed to call nix-build:", reason, file=sys.stderr)
return
self.nix_deps_dir = nix_deps_dir


@ -611,7 +702,7 @@ class RustBuild(object):
try:
subprocess.check_output([patchelf] + patchelf_args + [fname])
except subprocess.CalledProcessError as reason:
print("warning: failed to call patchelf:", reason)
print("warning: failed to call patchelf:", reason, file=sys.stderr)
return

def rustc_stamp(self):

@ -755,7 +846,7 @@ class RustBuild(object):
if "GITHUB_ACTIONS" in env:
print("::group::Building bootstrap")
else:
print("Building bootstrap")
print("Building bootstrap", file=sys.stderr)
build_dir = os.path.join(self.build_dir, "bootstrap")
if self.clean and os.path.exists(build_dir):
shutil.rmtree(build_dir)

@ -849,9 +940,12 @@ class RustBuild(object):
if 'SUDO_USER' in os.environ and not self.use_vendored_sources:
if os.getuid() == 0:
self.use_vendored_sources = True
print('info: looks like you\'re trying to run this command as root')
print(' and so in order to preserve your $HOME this will now')
print(' use vendored sources by default.')
print('info: looks like you\'re trying to run this command as root',
file=sys.stderr)
print(' and so in order to preserve your $HOME this will now',
file=sys.stderr)
print(' use vendored sources by default.',
file=sys.stderr)

cargo_dir = os.path.join(self.rust_root, '.cargo')
if self.use_vendored_sources:

@ -861,14 +955,18 @@ class RustBuild(object):
"--sync ./src/tools/rust-analyzer/Cargo.toml " \
"--sync ./compiler/rustc_codegen_cranelift/Cargo.toml " \
"--sync ./src/bootstrap/Cargo.toml "
print('error: vendoring required, but vendor directory does not exist.')
print('error: vendoring required, but vendor directory does not exist.',
file=sys.stderr)
print(' Run `cargo vendor {}` to initialize the '
'vendor directory.'.format(sync_dirs))
print('Alternatively, use the pre-vendored `rustc-src` dist component.')
'vendor directory.'.format(sync_dirs),
file=sys.stderr)
print('Alternatively, use the pre-vendored `rustc-src` dist component.',
file=sys.stderr)
raise Exception("{} not found".format(vendor_dir))

if not os.path.exists(cargo_dir):
print('error: vendoring required, but .cargo/config does not exist.')
print('error: vendoring required, but .cargo/config does not exist.',
file=sys.stderr)
raise Exception("{} not found".format(cargo_dir))
else:
if os.path.exists(cargo_dir):

@ -978,7 +1076,7 @@ def main():
print(
"info: Downloading and building bootstrap before processing --help command.\n"
" See src/bootstrap/README.md for help with common commands."
)
, file=sys.stderr)

exit_code = 0
success_word = "successfully"

@ -989,11 +1087,12 @@ def main():
exit_code = error.code
else:
exit_code = 1
print(error)
print(error, file=sys.stderr)
success_word = "unsuccessfully"

if not help_triggered:
print("Build completed", success_word, "in", format_build_time(time() - start_time))
print("Build completed", success_word, "in", format_build_time(time() - start_time),
file=sys.stderr)
sys.exit(exit_code)

@ -634,6 +634,14 @@ impl Kind {
Kind::Suggest => "suggest",
}
}

pub fn test_description(&self) -> &'static str {
match self {
Kind::Test => "Testing",
Kind::Bench => "Benchmarking",
_ => panic!("not a test command: {}!", self.as_str()),
}
}
}

impl<'a> Builder<'a> {

@ -695,7 +703,6 @@ impl<'a> Builder<'a> {
crate::toolstate::ToolStateCheck,
test::ExpandYamlAnchors,
test::Tidy,
test::TidySelfTest,
test::Ui,
test::RunPassValgrind,
test::MirOpt,

@ -711,11 +718,9 @@ impl<'a> Builder<'a> {
test::CrateLibrustc,
test::CrateRustdoc,
test::CrateRustdocJsonTypes,
test::CrateJsonDocLint,
test::SuggestTestsCrate,
test::CrateBootstrap,
test::Linkcheck,
test::TierCheck,
test::ReplacePlaceholderTest,
test::Cargotest,
test::Cargo,
test::RustAnalyzer,

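A standalone sketch of the new `test_description` helper's behaviour (illustrative only; the `Kind` enum below is a stand-in for bootstrap's real one): test-like kinds map to the verb printed while running them, and calling it for anything else is a bug and panics.

    // Sketch: behaviour of a test_description-style helper.
    #[derive(Debug)]
    enum Kind { Test, Bench, Build }

    fn test_description(kind: &Kind) -> &'static str {
        match kind {
            Kind::Test => "Testing",
            Kind::Bench => "Benchmarking",
            other => panic!("not a test command: {:?}!", other),
        }
    }

    fn main() {
        assert_eq!(test_description(&Kind::Test), "Testing");
        assert_eq!(test_description(&Kind::Bench), "Benchmarking");
        let _ = Kind::Build; // passing this to test_description would panic
    }
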
@ -578,7 +578,6 @@ mod dist {
compiler: Compiler { host, stage: 0 },
target: host,
mode: Mode::Std,
test_kind: test::TestKind::Test,
crates: vec![INTERNER.intern_str("std")],
},]
);

@ -1309,7 +1309,7 @@ impl Config {
if config.llvm_from_ci {
let triple = &config.build.triple;
let ci_llvm_bin = config.ci_llvm_root().join("bin");
let mut build_target = config
let build_target = config
.target_config
.entry(config.build)
.or_insert_with(|| Target::from_triple(&triple));

@ -112,7 +112,7 @@ impl Config {
is_nixos && !Path::new("/lib").exists()
});
if val {
println!("info: You seem to be using Nix.");
eprintln!("info: You seem to be using Nix.");
}
val
}

@ -226,7 +226,7 @@ impl Config {
curl.stdout(Stdio::from(f));
if !self.check_run(&mut curl) {
if self.build.contains("windows-msvc") {
println!("Fallback to PowerShell");
eprintln!("Fallback to PowerShell");
for _ in 0..3 {
if self.try_run(Command::new("PowerShell.exe").args(&[
"/nologo",

@ -239,7 +239,7 @@ impl Config {
])) {
return;
}
println!("\nspurious failure, trying again");
eprintln!("\nspurious failure, trying again");
}
}
if !help_on_error.is_empty() {

@ -250,7 +250,7 @@ impl Config {
}

fn unpack(&self, tarball: &Path, dst: &Path, pattern: &str) {
println!("extracting {} to {}", tarball.display(), dst.display());
eprintln!("extracting {} to {}", tarball.display(), dst.display());
if !dst.exists() {
t!(fs::create_dir_all(dst));
}

@ -541,7 +541,18 @@ impl Config {
None
};

self.download_file(&format!("{base_url}/{url}"), &tarball, "");
let mut help_on_error = "";
if destination == "ci-rustc" {
help_on_error = "error: failed to download pre-built rustc from CI

note: old builds get deleted after a certain time
help: if trying to compile an old commit of rustc, disable `download-rustc` in config.toml:

[rust]
download-rustc = false
";
}
self.download_file(&format!("{base_url}/{url}"), &tarball, help_on_error);
if let Some(sha256) = checksum {
if !self.verify(&tarball, sha256) {
panic!("failed to verify {}", tarball.display());