Auto merge of #109384 - matthiaskrgr:rollup-hu348gs, r=matthiaskrgr
Rollup of 8 pull requests

Successful merges:

 - #109170 (Set `CMAKE_SYSTEM_NAME` for Linux targets)
 - #109266 (rustdoc: Correctly merge import's and its target's docs in one more case)
 - #109267 (Add tests for configure.py)
 - #109273 (Make `slice::is_sorted_by` implementation nicer)
 - #109277 (Fix generics_of for impl's RPITIT synthesized associated type)
 - #109307 (Ignore `Inlined` spans when computing caller location.)
 - #109364 (Only expect a GAT const param for `type_of` of GAT const arg)
 - #109365 (Update mdbook)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
Commit: febd59e122
32 changed files with 446 additions and 246 deletions
@@ -3103,9 +3103,9 @@ dependencies = [

 [[package]]
 name = "mdbook"
-version = "0.4.25"
+version = "0.4.28"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d1ed28d5903dde77bd5182645078a37ee57014cac6ccb2d54e1d6496386648e4"
+checksum = "764dcbfc2e5f868bc1b566eb179dff1a06458fd0cff846aae2579392dd3f01a0"
 dependencies = [
  "ammonia",
  "anyhow",

@@ -1475,7 +1475,11 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
    ) -> OperandRef<'tcx, Bx::Value> {
        let tcx = bx.tcx();

-       let mut span_to_caller_location = |span: Span| {
+       let mut span_to_caller_location = |mut span: Span| {
+           // Remove `Inlined` marks as they pollute `expansion_cause`.
+           while span.is_inlined() {
+               span.remove_mark();
+           }
            let topmost = span.ctxt().outer_expn().expansion_cause().unwrap_or(span);
            let caller = tcx.sess.source_map().lookup_char_pos(topmost.lo());
            let const_loc = tcx.const_caller_location((

@@ -111,7 +111,11 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
        location
    }

-   pub(crate) fn location_triple_for_span(&self, span: Span) -> (Symbol, u32, u32) {
+   pub(crate) fn location_triple_for_span(&self, mut span: Span) -> (Symbol, u32, u32) {
+       // Remove `Inlined` marks as they pollute `expansion_cause`.
+       while span.is_inlined() {
+           span.remove_mark();
+       }
        let topmost = span.ctxt().outer_expn().expansion_cause().unwrap_or(span);
        let caller = self.tcx.sess.source_map().lookup_char_pos(topmost.lo());
        (

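The two hunks above make `#[track_caller]` locations ignore spans introduced by MIR inlining. At the user level, the expectation is the behaviour exercised by the new `mir-inlined-macro.rs` test further down: a `Location::caller()` reached through a macro inside an `#[inline(always)]` function still reports the macro invocation site. A minimal standalone sketch of that expectation (not part of the patch; the asserted line number refers to this snippet saved as its own file):

```rust
use std::panic::Location;

// Expands to a call to `Location::caller()`; the reported location should be
// the macro invocation site inside `wrapper`, even when `wrapper` is inlined.
macro_rules! here {
    () => {
        Location::caller()
    };
}

#[inline(always)]
fn wrapper() -> &'static Location<'static> {
    here!()
}

fn main() {
    let loc = wrapper();
    assert_eq!(loc.line(), 13); // `here!()` sits on line 13 of this file
    println!("reported: {}:{}:{}", loc.file(), loc.line(), loc.column());
}
```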
@@ -3152,8 +3152,12 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {

        debug!("impl_trait_ty_to_ty: generics={:?}", generics);
        let substs = InternalSubsts::for_item(tcx, def_id, |param, _| {
-           if let Some(i) = (param.index as usize).checked_sub(generics.parent_count) {
-               // Our own parameters are the resolved lifetimes.
+           // We use `generics.count() - lifetimes.len()` here instead of `generics.parent_count`
+           // since return-position impl trait in trait squashes all of the generics from its source fn
+           // into its own generics, so the opaque's "own" params isn't always just lifetimes.
+           if let Some(i) = (param.index as usize).checked_sub(generics.count() - lifetimes.len())
+           {
+               // Resolve our own lifetime parameters.
                let GenericParamDefKind::Lifetime { .. } = param.kind else { bug!() };
                let hir::GenericArg::Lifetime(lifetime) = &lifetimes[i] else { bug!() };
                self.ast_region_to_region(lifetime, None).into()

@@ -278,8 +278,11 @@ pub(super) fn type_of(tcx: TyCtxt<'_>, def_id: DefId) -> ty::EarlyBinder<Ty<'_>> {
        }
        TraitItemKind::Const(ty, body_id) => body_id
            .and_then(|body_id| {
-               is_suggestable_infer_ty(ty)
-                   .then(|| infer_placeholder_type(tcx, def_id, body_id, ty.span, item.ident, "constant",))
+               is_suggestable_infer_ty(ty).then(|| {
+                   infer_placeholder_type(
+                       tcx, def_id, body_id, ty.span, item.ident, "constant",
+                   )
+               })
            })
            .unwrap_or_else(|| icx.to_ty(ty)),
        TraitItemKind::Type(_, Some(ty)) => icx.to_ty(ty),

@@ -335,14 +338,15 @@ pub(super) fn type_of(tcx: TyCtxt<'_>, def_id: DefId) -> ty::EarlyBinder<Ty<'_>> {
            }
        }
        ItemKind::TyAlias(self_ty, _) => icx.to_ty(self_ty),
-       ItemKind::Impl(hir::Impl { self_ty, .. }) => {
-           match self_ty.find_self_aliases() {
-               spans if spans.len() > 0 => {
-                   let guar = tcx.sess.emit_err(crate::errors::SelfInImplSelf { span: spans.into(), note: () });
-                   tcx.ty_error(guar)
-               },
-               _ => icx.to_ty(*self_ty),
-           }
+       ItemKind::Impl(hir::Impl { self_ty, .. }) => match self_ty.find_self_aliases() {
+           spans if spans.len() > 0 => {
+               let guar = tcx.sess.emit_err(crate::errors::SelfInImplSelf {
+                   span: spans.into(),
+                   note: (),
+               });
+               tcx.ty_error(guar)
+           }
+           _ => icx.to_ty(*self_ty),
        },
        ItemKind::Fn(..) => {
            let substs = InternalSubsts::identity_for_item(tcx, def_id.to_def_id());

@@ -364,7 +368,10 @@ pub(super) fn type_of(tcx: TyCtxt<'_>, def_id: DefId) -> ty::EarlyBinder<Ty<'_>> {
            ..
        }) => {
            if in_trait && !tcx.impl_defaultness(owner).has_value() {
-               span_bug!(tcx.def_span(def_id), "tried to get type of this RPITIT with no definition");
+               span_bug!(
+                   tcx.def_span(def_id),
+                   "tried to get type of this RPITIT with no definition"
+               );
            }
            find_opaque_ty_constraints_for_rpit(tcx, def_id, owner)
        }

@@ -453,15 +460,12 @@ pub(super) fn type_of(tcx: TyCtxt<'_>, def_id: DefId) -> ty::EarlyBinder<Ty<'_>> {
            tcx.adt_def(tcx.hir().get_parent_item(hir_id)).repr().discr_type().to_ty(tcx)
        }

-       Node::TypeBinding(
-           TypeBinding {
-               hir_id: binding_id,
-               kind: TypeBindingKind::Equality { term: Term::Const(e) },
-               ident,
-               ..
-           },
-       ) if let Node::TraitRef(trait_ref) =
-           tcx.hir().get_parent(*binding_id)
+       Node::TypeBinding(TypeBinding {
+           hir_id: binding_id,
+           kind: TypeBindingKind::Equality { term: Term::Const(e) },
+           ident,
+           ..
+       }) if let Node::TraitRef(trait_ref) = tcx.hir().get_parent(*binding_id)
            && e.hir_id == hir_id =>
        {
            let Some(trait_def_id) = trait_ref.trait_def_id() else {

@@ -475,7 +479,9 @@ pub(super) fn type_of(tcx: TyCtxt<'_>, def_id: DefId) -> ty::EarlyBinder<Ty<'_>> {
                def_id.to_def_id(),
            );
            if let Some(assoc_item) = assoc_item {
-               tcx.type_of(assoc_item.def_id).subst_identity()
+               tcx.type_of(assoc_item.def_id)
+                   .no_bound_vars()
+                   .expect("const parameter types cannot be generic")
            } else {
                // FIXME(associated_const_equality): add a useful error message here.
                tcx.ty_error_with_message(

@@ -485,10 +491,13 @@ pub(super) fn type_of(tcx: TyCtxt<'_>, def_id: DefId) -> ty::EarlyBinder<Ty<'_>> {
            }
        }

-       Node::TypeBinding(
-           TypeBinding { hir_id: binding_id, gen_args, kind, ident, .. },
-       ) if let Node::TraitRef(trait_ref) =
-           tcx.hir().get_parent(*binding_id)
+       Node::TypeBinding(TypeBinding {
+           hir_id: binding_id,
+           gen_args,
+           kind,
+           ident,
+           ..
+       }) if let Node::TraitRef(trait_ref) = tcx.hir().get_parent(*binding_id)
            && let Some((idx, _)) =
                gen_args.args.iter().enumerate().find(|(_, arg)| {
                    if let GenericArg::Const(ct) = arg {

@@ -517,15 +526,18 @@ pub(super) fn type_of(tcx: TyCtxt<'_>, def_id: DefId) -> ty::EarlyBinder<Ty<'_>> {
                },
                def_id.to_def_id(),
            );
-           if let Some(param)
-               = assoc_item.map(|item| &tcx.generics_of(item.def_id).params[idx]).filter(|param| param.kind.is_ty_or_const())
+           if let Some(assoc_item) = assoc_item
+               && let param = &tcx.generics_of(assoc_item.def_id).params[idx]
+               && matches!(param.kind, ty::GenericParamDefKind::Const { .. })
            {
-               tcx.type_of(param.def_id).subst_identity()
+               tcx.type_of(param.def_id)
+                   .no_bound_vars()
+                   .expect("const parameter types cannot be generic")
            } else {
                // FIXME(associated_const_equality): add a useful error message here.
                tcx.ty_error_with_message(
                    DUMMY_SP,
-                   "Could not find associated const on trait",
+                   "Could not find const param on associated item",
                )
            }
        }

@@ -880,7 +880,7 @@ impl Span {
    pub fn fresh_expansion(self, expn_id: LocalExpnId) -> Span {
        HygieneData::with(|data| {
            self.with_ctxt(data.apply_mark(
-               SyntaxContext::root(),
+               self.ctxt(),
                expn_id.to_expn_id(),
                Transparency::Transparent,
            ))

@@ -396,6 +396,8 @@ fn associated_type_for_impl_trait_in_impl(
    impl_assoc_ty.impl_defaultness(tcx.impl_defaultness(impl_fn_def_id));

    // Copy generics_of the trait's associated item but the impl as the parent.
+   // FIXME(-Zlower-impl-trait-in-trait-to-assoc-ty) resolves to the trait instead of the impl
+   // generics.
    impl_assoc_ty.generics_of({
        let trait_assoc_generics = tcx.generics_of(trait_assoc_def_id);
        let trait_assoc_parent_count = trait_assoc_generics.parent_count;

@@ -404,16 +406,10 @@ fn associated_type_for_impl_trait_in_impl(
        let parent_generics = tcx.generics_of(impl_def_id);
        let parent_count = parent_generics.parent_count + parent_generics.params.len();

-       let mut impl_fn_params = tcx.generics_of(impl_fn_def_id).params.clone();
-
        for param in &mut params {
-           param.index = param.index + parent_count as u32 + impl_fn_params.len() as u32
-               - trait_assoc_parent_count as u32;
+           param.index = param.index + parent_count as u32 - trait_assoc_parent_count as u32;
        }

-       impl_fn_params.extend(params);
-       params = impl_fn_params;
-
        let param_def_id_to_index =
            params.iter().map(|param| (param.def_id, param.index)).collect();

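For orientation (not from this patch): at the source level, a return-position `impl Trait` in a trait method can mention both the impl's generics and the method's own generics, which is why the synthesized associated type above splices the function's params after the impl's params rather than assuming its "own" params are only lifetimes. A small sketch that compiles on current Rust, where return-position `impl Trait` in traits is stable; at the time of this commit it was still behind `return_position_impl_trait_in_trait`:

```rust
trait Mapper<T: Clone> {
    // The opaque return type captures the trait/impl parameter `T`
    // and the method's own parameter `U`.
    fn compose<U: Clone>(&self, extra: U) -> impl Fn() -> (T, U);
}

struct Pair<T>(T);

impl<T: Clone> Mapper<T> for Pair<T> {
    fn compose<U: Clone>(&self, extra: U) -> impl Fn() -> (T, U) {
        let value = self.0.clone();
        move || (value.clone(), extra.clone())
    }
}

fn main() {
    let f = Pair(1u32).compose("hi");
    assert_eq!(f(), (1, "hi"));
}
```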
@@ -132,9 +132,7 @@ iterator! {struct Iter -> *const T, &'a T, const, {/* no mut */}, {
            Self: Sized,
            F: FnMut(&Self::Item, &Self::Item) -> Option<Ordering>,
        {
-           self.as_slice().windows(2).all(|w| {
-               compare(&&w[0], &&w[1]).map(|o| o != Ordering::Greater).unwrap_or(false)
-           })
+           self.as_slice().is_sorted_by(|a, b| compare(&a, &b))
        }
    }}

@@ -3822,7 +3822,7 @@ impl<T> [T] {
    where
        F: FnMut(&'a T, &'a T) -> Option<Ordering>,
    {
-       self.iter().is_sorted_by(|a, b| compare(*a, *b))
+       self.array_windows().all(|[a, b]| compare(a, b).map_or(false, Ordering::is_le))
    }

    /// Checks if the elements of this slice are sorted using the given key extraction function.

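Both hunks express the same pairwise rule: a slice is sorted when every adjacent pair compares as less-than-or-equal, and an incomparable pair makes it unsorted. A stand-alone equivalent against the stable API (a sketch using `windows` instead of the unstable `array_windows`, not the library source):

```rust
use std::cmp::Ordering;

// Every adjacent pair must compare as `Less` or `Equal`; an incomparable
// pair (`None`) makes the slice count as unsorted.
fn is_sorted_by<T, F>(slice: &[T], mut compare: F) -> bool
where
    F: FnMut(&T, &T) -> Option<Ordering>,
{
    slice
        .windows(2)
        .all(|w| compare(&w[0], &w[1]).map_or(false, Ordering::is_le))
}

fn main() {
    assert!(is_sorted_by(&[1, 2, 2, 3], |a, b| a.partial_cmp(b)));
    assert!(!is_sorted_by(&[3.0, f64::NAN], |a, b| a.partial_cmp(b)));
}
```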
@@ -11,6 +11,7 @@ import sys
from shutil import rmtree

import bootstrap
+import configure


class VerifyTestCase(unittest.TestCase):

@@ -74,12 +75,50 @@ class ProgramOutOfDate(unittest.TestCase):
        self.assertFalse(self.build.program_out_of_date(self.rustc_stamp_path, self.key))


+class GenerateAndParseConfig(unittest.TestCase):
+    """Test that we can serialize and deserialize a config.toml file"""
+    def serialize_and_parse(self, args):
+        from io import StringIO
+
+        section_order, sections, targets = configure.parse_args(args)
+        buffer = StringIO()
+        configure.write_config_toml(buffer, section_order, targets, sections)
+        build = bootstrap.RustBuild()
+        build.config_toml = buffer.getvalue()
+
+        try:
+            import tomllib
+            # Verify this is actually valid TOML.
+            tomllib.loads(build.config_toml)
+        except ImportError:
+            print("warning: skipping TOML validation, need at least python 3.11", file=sys.stderr)
+        return build
+
+    def test_no_args(self):
+        build = self.serialize_and_parse([])
+        self.assertEqual(build.get_toml("changelog-seen"), '2')
+        self.assertIsNone(build.get_toml("llvm.download-ci-llvm"))
+
+    def test_set_section(self):
+        build = self.serialize_and_parse(["--set", "llvm.download-ci-llvm"])
+        self.assertEqual(build.get_toml("download-ci-llvm", section="llvm"), 'true')
+
+    def test_set_target(self):
+        build = self.serialize_and_parse(["--set", "target.x86_64-unknown-linux-gnu.cc=gcc"])
+        self.assertEqual(build.get_toml("cc", section="target.x86_64-unknown-linux-gnu"), 'gcc')
+
+    # Uncomment when #108928 is fixed.
+    # def test_set_top_level(self):
+    #     build = self.serialize_and_parse(["--set", "profile=compiler"])
+    #     self.assertEqual(build.get_toml("profile"), 'compiler')
+
if __name__ == '__main__':
    SUITE = unittest.TestSuite()
    TEST_LOADER = unittest.TestLoader()
    SUITE.addTest(doctest.DocTestSuite(bootstrap))
    SUITE.addTests([
        TEST_LOADER.loadTestsFromTestCase(VerifyTestCase),
+       TEST_LOADER.loadTestsFromTestCase(GenerateAndParseConfig),
        TEST_LOADER.loadTestsFromTestCase(ProgramOutOfDate)])

    RUNNER = unittest.TextTestRunner(stream=sys.stdout, verbosity=2)

@@ -205,77 +205,78 @@ if '--help' in sys.argv or '-h' in sys.argv:

# Parse all command line arguments into one of these three lists, handling
# boolean and value-based options separately
-unknown_args = []
-need_value_args = []
-known_args = {}
+def parse_args(args):
+    unknown_args = []
+    need_value_args = []
+    known_args = {}

-p("processing command line")
-i = 1
-while i < len(sys.argv):
-    arg = sys.argv[i]
-    i += 1
-    if not arg.startswith('--'):
-        unknown_args.append(arg)
-        continue
+    i = 0
+    while i < len(args):
+        arg = args[i]
+        i += 1
+        if not arg.startswith('--'):
+            unknown_args.append(arg)
+            continue

        found = False
        for option in options:
            value = None
            if option.value:
                keyval = arg[2:].split('=', 1)
                key = keyval[0]
                if option.name != key:
                    continue

                if len(keyval) > 1:
                    value = keyval[1]
-               elif i < len(sys.argv):
-                   value = sys.argv[i]
+               elif i < len(args):
+                   value = args[i]
                    i += 1
+               else:
+                   need_value_args.append(arg)
+                   continue
            else:
-               need_value_args.append(arg)
-               continue
-           else:
                if arg[2:] == 'enable-' + option.name:
                    value = True
                elif arg[2:] == 'disable-' + option.name:
                    value = False
                else:
                    continue

            found = True
            if option.name not in known_args:
                known_args[option.name] = []
            known_args[option.name].append((option, value))
            break

        if not found:
            unknown_args.append(arg)
-p("")

    # Note: here and a few other places, we use [-1] to apply the *last* value
    # passed. But if option-checking is enabled, then the known_args loop will
    # also assert that options are only passed once.
    option_checking = ('option-checking' not in known_args
                       or known_args['option-checking'][-1][1])
    if option_checking:
        if len(unknown_args) > 0:
            err("Option '" + unknown_args[0] + "' is not recognized")
        if len(need_value_args) > 0:
            err("Option '{0}' needs a value ({0}=val)".format(need_value_args[0]))

-# Parse all known arguments into a configuration structure that reflects the
-# TOML we're going to write out
-config = {}
+    config = {}
+
+    set('build.configure-args', sys.argv[1:], config)
+    apply_args(known_args, option_checking, config)
+    return parse_example_config(known_args, config)


-def build():
+def build(known_args):
    if 'build' in known_args:
        return known_args['build'][-1][1]
    return bootstrap.default_build_triple(verbose=False)


-def set(key, value):
+def set(key, value, config):
    if isinstance(value, list):
        # Remove empty values, which value.split(',') tends to generate.
        value = [v for v in value if v]

@@ -297,75 +298,76 @@ def set(key, value):
        arr = arr[part]


-for key in known_args:
-    # The `set` option is special and can be passed a bunch of times
-    if key == 'set':
-        for option, value in known_args[key]:
-            keyval = value.split('=', 1)
-            if len(keyval) == 1 or keyval[1] == "true":
-                value = True
-            elif keyval[1] == "false":
-                value = False
-            else:
-                value = keyval[1]
-            set(keyval[0], value)
-        continue
+def apply_args(known_args, option_checking, config):
+    for key in known_args:
+        # The `set` option is special and can be passed a bunch of times
+        if key == 'set':
+            for option, value in known_args[key]:
+                keyval = value.split('=', 1)
+                if len(keyval) == 1 or keyval[1] == "true":
+                    value = True
+                elif keyval[1] == "false":
+                    value = False
+                else:
+                    value = keyval[1]
+                set(keyval[0], value, config)
+            continue

        # Ensure each option is only passed once
        arr = known_args[key]
        if option_checking and len(arr) > 1:
            err("Option '{}' provided more than once".format(key))
        option, value = arr[-1]

        # If we have a clear avenue to set our value in rustbuild, do so
        if option.rustbuild is not None:
-           set(option.rustbuild, value)
+           set(option.rustbuild, value, config)
            continue

        # Otherwise we're a "special" option and need some extra handling, so do
        # that here.
-   if option.name == 'sccache':
-       set('llvm.ccache', 'sccache')
-   elif option.name == 'local-rust':
-       for path in os.environ['PATH'].split(os.pathsep):
-           if os.path.exists(path + '/rustc'):
-               set('build.rustc', path + '/rustc')
-               break
-       for path in os.environ['PATH'].split(os.pathsep):
-           if os.path.exists(path + '/cargo'):
-               set('build.cargo', path + '/cargo')
-               break
-   elif option.name == 'local-rust-root':
-       set('build.rustc', value + '/bin/rustc')
-       set('build.cargo', value + '/bin/cargo')
-   elif option.name == 'llvm-root':
-       set('target.{}.llvm-config'.format(build()), value + '/bin/llvm-config')
-   elif option.name == 'llvm-config':
-       set('target.{}.llvm-config'.format(build()), value)
-   elif option.name == 'llvm-filecheck':
-       set('target.{}.llvm-filecheck'.format(build()), value)
-   elif option.name == 'tools':
-       set('build.tools', value.split(','))
-   elif option.name == 'codegen-backends':
-       set('rust.codegen-backends', value.split(','))
-   elif option.name == 'host':
-       set('build.host', value.split(','))
-   elif option.name == 'target':
-       set('build.target', value.split(','))
-   elif option.name == 'full-tools':
-       set('rust.codegen-backends', ['llvm'])
-       set('rust.lld', True)
-       set('rust.llvm-tools', True)
-       set('build.extended', True)
-   elif option.name == 'option-checking':
-       # this was handled above
-       pass
-   elif option.name == 'dist-compression-formats':
-       set('dist.compression-formats', value.split(','))
-   else:
-       raise RuntimeError("unhandled option {}".format(option.name))
+       build_triple = build(known_args)

-set('build.configure-args', sys.argv[1:])
+       if option.name == 'sccache':
+           set('llvm.ccache', 'sccache', config)
+       elif option.name == 'local-rust':
+           for path in os.environ['PATH'].split(os.pathsep):
+               if os.path.exists(path + '/rustc'):
+                   set('build.rustc', path + '/rustc', config)
+                   break
+           for path in os.environ['PATH'].split(os.pathsep):
+               if os.path.exists(path + '/cargo'):
+                   set('build.cargo', path + '/cargo', config)
+                   break
+       elif option.name == 'local-rust-root':
+           set('build.rustc', value + '/bin/rustc', config)
+           set('build.cargo', value + '/bin/cargo', config)
+       elif option.name == 'llvm-root':
+           set('target.{}.llvm-config'.format(build_triple), value + '/bin/llvm-config', config)
+       elif option.name == 'llvm-config':
+           set('target.{}.llvm-config'.format(build_triple), value, config)
+       elif option.name == 'llvm-filecheck':
+           set('target.{}.llvm-filecheck'.format(build_triple), value, config)
+       elif option.name == 'tools':
+           set('build.tools', value.split(','), config)
+       elif option.name == 'codegen-backends':
+           set('rust.codegen-backends', value.split(','), config)
+       elif option.name == 'host':
+           set('build.host', value.split(','), config)
+       elif option.name == 'target':
+           set('build.target', value.split(','), config)
+       elif option.name == 'full-tools':
+           set('rust.codegen-backends', ['llvm'], config)
+           set('rust.lld', True, config)
+           set('rust.llvm-tools', True, config)
+           set('build.extended', True, config)
+       elif option.name == 'option-checking':
+           # this was handled above
+           pass
+       elif option.name == 'dist-compression-formats':
+           set('dist.compression-formats', value.split(','), config)
+       else:
+           raise RuntimeError("unhandled option {}".format(option.name))

# "Parse" the `config.example.toml` file into the various sections, and we'll
# use this as a template of a `config.toml` to write out which preserves

|
||||||
#
|
#
|
||||||
# Note that the `target` section is handled separately as we'll duplicate it
|
# Note that the `target` section is handled separately as we'll duplicate it
|
||||||
# per configured target, so there's a bit of special handling for that here.
|
# per configured target, so there's a bit of special handling for that here.
|
||||||
sections = {}
|
def parse_example_config(known_args, config):
|
||||||
cur_section = None
|
sections = {}
|
||||||
sections[None] = []
|
cur_section = None
|
||||||
section_order = [None]
|
sections[None] = []
|
||||||
targets = {}
|
section_order = [None]
|
||||||
top_level_keys = []
|
targets = {}
|
||||||
|
top_level_keys = []
|
||||||
|
|
||||||
for line in open(rust_dir + '/config.example.toml').read().split("\n"):
|
for line in open(rust_dir + '/config.example.toml').read().split("\n"):
|
||||||
if cur_section == None:
|
if cur_section == None:
|
||||||
if line.count('=') == 1:
|
if line.count('=') == 1:
|
||||||
top_level_key = line.split('=')[0]
|
top_level_key = line.split('=')[0]
|
||||||
top_level_key = top_level_key.strip(' #')
|
top_level_key = top_level_key.strip(' #')
|
||||||
top_level_keys.append(top_level_key)
|
top_level_keys.append(top_level_key)
|
||||||
if line.startswith('['):
|
if line.startswith('['):
|
||||||
cur_section = line[1:-1]
|
cur_section = line[1:-1]
|
||||||
if cur_section.startswith('target'):
|
if cur_section.startswith('target'):
|
||||||
cur_section = 'target'
|
cur_section = 'target'
|
||||||
elif '.' in cur_section:
|
elif '.' in cur_section:
|
||||||
raise RuntimeError("don't know how to deal with section: {}".format(cur_section))
|
raise RuntimeError("don't know how to deal with section: {}".format(cur_section))
|
||||||
sections[cur_section] = [line]
|
sections[cur_section] = [line]
|
||||||
section_order.append(cur_section)
|
section_order.append(cur_section)
|
||||||
else:
|
else:
|
||||||
sections[cur_section].append(line)
|
sections[cur_section].append(line)
|
||||||
|
|
||||||
# Fill out the `targets` array by giving all configured targets a copy of the
|
# Fill out the `targets` array by giving all configured targets a copy of the
|
||||||
# `target` section we just loaded from the example config
|
# `target` section we just loaded from the example config
|
||||||
configured_targets = [build()]
|
configured_targets = [build(known_args)]
|
||||||
if 'build' in config:
|
if 'build' in config:
|
||||||
if 'host' in config['build']:
|
if 'host' in config['build']:
|
||||||
configured_targets += config['build']['host']
|
configured_targets += config['build']['host']
|
||||||
if 'target' in config['build']:
|
if 'target' in config['build']:
|
||||||
configured_targets += config['build']['target']
|
configured_targets += config['build']['target']
|
||||||
if 'target' in config:
|
if 'target' in config:
|
||||||
for target in config['target']:
|
for target in config['target']:
|
||||||
configured_targets.append(target)
|
configured_targets.append(target)
|
||||||
for target in configured_targets:
|
for target in configured_targets:
|
||||||
targets[target] = sections['target'][:]
|
targets[target] = sections['target'][:]
|
||||||
# For `.` to be valid TOML, it needs to be quoted. But `bootstrap.py` doesn't use a proper TOML parser and fails to parse the target.
|
# For `.` to be valid TOML, it needs to be quoted. But `bootstrap.py` doesn't use a proper TOML parser and fails to parse the target.
|
||||||
# Avoid using quotes unless it's necessary.
|
# Avoid using quotes unless it's necessary.
|
||||||
targets[target][0] = targets[target][0].replace("x86_64-unknown-linux-gnu", "'{}'".format(target) if "." in target else target)
|
targets[target][0] = targets[target][0].replace("x86_64-unknown-linux-gnu", "'{}'".format(target) if "." in target else target)
|
||||||
|
|
||||||
|
configure_file(sections, top_level_keys, targets, config)
|
||||||
|
return section_order, sections, targets
|
||||||
|
|
||||||
|
|
||||||
def is_number(value):
|
def is_number(value):
|
||||||
|
@ -475,17 +481,20 @@ def configure_top_level_key(lines, top_level_key, value):
|
||||||
raise RuntimeError("failed to find config line for {}".format(top_level_key))
|
raise RuntimeError("failed to find config line for {}".format(top_level_key))
|
||||||
|
|
||||||
|
|
||||||
for section_key, section_config in config.items():
|
# Modify `sections` to reflect the parsed arguments and example configs.
|
||||||
if section_key not in sections and section_key not in top_level_keys:
|
def configure_file(sections, top_level_keys, targets, config):
|
||||||
raise RuntimeError("config key {} not in sections or top_level_keys".format(section_key))
|
for section_key, section_config in config.items():
|
||||||
if section_key in top_level_keys:
|
if section_key not in sections and section_key not in top_level_keys:
|
||||||
configure_top_level_key(sections[None], section_key, section_config)
|
raise RuntimeError("config key {} not in sections or top_level_keys".format(section_key))
|
||||||
|
if section_key in top_level_keys:
|
||||||
|
configure_top_level_key(sections[None], section_key, section_config)
|
||||||
|
|
||||||
|
elif section_key == 'target':
|
||||||
|
for target in section_config:
|
||||||
|
configure_section(targets[target], section_config[target])
|
||||||
|
else:
|
||||||
|
configure_section(sections[section_key], section_config)
|
||||||
|
|
||||||
elif section_key == 'target':
|
|
||||||
for target in section_config:
|
|
||||||
configure_section(targets[target], section_config[target])
|
|
||||||
else:
|
|
||||||
configure_section(sections[section_key], section_config)
|
|
||||||
|
|
||||||
def write_uncommented(target, f):
|
def write_uncommented(target, f):
|
||||||
block = []
|
block = []
|
||||||
|
@ -503,24 +512,36 @@ def write_uncommented(target, f):
|
||||||
is_comment = is_comment and line.startswith('#')
|
is_comment = is_comment and line.startswith('#')
|
||||||
return f
|
return f
|
||||||
|
|
||||||
# Now that we've built up our `config.toml`, write it all out in the same
|
|
||||||
# order that we read it in.
|
def write_config_toml(writer, section_order, targets, sections):
|
||||||
p("")
|
|
||||||
p("writing `config.toml` in current directory")
|
|
||||||
with bootstrap.output('config.toml') as f:
|
|
||||||
for section in section_order:
|
for section in section_order:
|
||||||
if section == 'target':
|
if section == 'target':
|
||||||
for target in targets:
|
for target in targets:
|
||||||
f = write_uncommented(targets[target], f)
|
writer = write_uncommented(targets[target], writer)
|
||||||
else:
|
else:
|
||||||
f = write_uncommented(sections[section], f)
|
writer = write_uncommented(sections[section], writer)
|
||||||
|
|
||||||
with bootstrap.output('Makefile') as f:
|
|
||||||
contents = os.path.join(rust_dir, 'src', 'bootstrap', 'mk', 'Makefile.in')
|
|
||||||
contents = open(contents).read()
|
|
||||||
contents = contents.replace("$(CFG_SRC_DIR)", rust_dir + '/')
|
|
||||||
contents = contents.replace("$(CFG_PYTHON)", sys.executable)
|
|
||||||
f.write(contents)
|
|
||||||
|
|
||||||
p("")
|
if __name__ == "__main__":
|
||||||
p("run `python {}/x.py --help`".format(rust_dir))
|
p("processing command line")
|
||||||
|
# Parse all known arguments into a configuration structure that reflects the
|
||||||
|
# TOML we're going to write out
|
||||||
|
p("")
|
||||||
|
section_order, sections, targets = parse_args(sys.argv[1:])
|
||||||
|
|
||||||
|
# Now that we've built up our `config.toml`, write it all out in the same
|
||||||
|
# order that we read it in.
|
||||||
|
p("")
|
||||||
|
p("writing `config.toml` in current directory")
|
||||||
|
with bootstrap.output('config.toml') as f:
|
||||||
|
write_config_toml(f, section_order, targets, sections)
|
||||||
|
|
||||||
|
with bootstrap.output('Makefile') as f:
|
||||||
|
contents = os.path.join(rust_dir, 'src', 'bootstrap', 'mk', 'Makefile.in')
|
||||||
|
contents = open(contents).read()
|
||||||
|
contents = contents.replace("$(CFG_SRC_DIR)", rust_dir + '/')
|
||||||
|
contents = contents.replace("$(CFG_PYTHON)", sys.executable)
|
||||||
|
f.write(contents)
|
||||||
|
|
||||||
|
p("")
|
||||||
|
p("run `python {}/x.py --help`".format(rust_dir))
|
||||||
|
|
|
@@ -567,6 +567,8 @@ fn configure_cmake(
        cfg.define("CMAKE_SYSTEM_NAME", "Haiku");
    } else if target.contains("solaris") || target.contains("illumos") {
        cfg.define("CMAKE_SYSTEM_NAME", "SunOS");
+   } else if target.contains("linux") {
+       cfg.define("CMAKE_SYSTEM_NAME", "Linux");
    }
    // When cross-compiling we should also set CMAKE_SYSTEM_VERSION, but in
    // that case like CMake we cannot easily determine system version either.

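Outside of bootstrap, the same definition can be set through the `cmake` crate's builder. A minimal build-script sketch, assuming a CMake project checked out at `./native` and the `cmake` crate as a build dependency (both are illustrative, not part of this change):

```rust
// build.rs
fn main() {
    // Cargo sets TARGET for build scripts; default to empty if unset.
    let target = std::env::var("TARGET").unwrap_or_default();

    let mut cfg = cmake::Config::new("native");
    // Mirror the bootstrap change: give CMake an explicit system name for
    // Linux targets instead of letting it guess from the host.
    if target.contains("linux") {
        cfg.define("CMAKE_SYSTEM_NAME", "Linux");
    }
    let dst = cfg.build();
    println!("cargo:rustc-link-search=native={}", dst.display());
}
```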
@@ -1,6 +1,8 @@
FROM ubuntu:22.04

ARG DEBIAN_FRONTEND=noninteractive
+# NOTE: intentionally uses python2 for x.py so we can test it still works.
+# validate-toolstate only runs in our CI, so it's ok for it to only support python3.
RUN apt-get update && apt-get install -y --no-install-recommends \
  g++ \
  make \

@@ -8,6 +10,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
  file \
  curl \
  ca-certificates \
+ python2.7 \
  python3 \
  python3-pip \
  python3-pkg-resources \

@@ -30,4 +33,4 @@ RUN pip3 install --no-deps --no-cache-dir --require-hashes -r /tmp/reuse-require
COPY host-x86_64/mingw-check/validate-toolstate.sh /scripts/
COPY host-x86_64/mingw-check/validate-error-codes.sh /scripts/

-ENV SCRIPT python3 ../x.py test --stage 0 src/tools/tidy tidyselftest
+ENV SCRIPT python2.7 ../x.py test --stage 0 src/tools/tidy tidyselftest

@@ -2,7 +2,6 @@ FROM ubuntu:22.04

ARG DEBIAN_FRONTEND=noninteractive

-# NOTE: intentionally installs both python2 and python3 so we can test support for both.
RUN apt-get update && apt-get install -y --no-install-recommends \
  g++ \
  gcc-multilib \

@@ -11,8 +10,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
  file \
  curl \
  ca-certificates \
- python2.7 \
- python3 \
+ python3.11 \
  git \
  cmake \
  sudo \

@@ -39,6 +39,7 @@ use std::hash::Hash;
use std::mem;
use thin_vec::ThinVec;

+use crate::clean::inline::merge_attrs;
use crate::core::{self, DocContext, ImplTraitParam};
use crate::formats::item_type::ItemType;
use crate::visit_ast::Module as DocModule;

@@ -2373,21 +2374,22 @@ fn clean_maybe_renamed_item<'tcx>(
        _ => unreachable!("not yet converted"),
    };

-   let mut extra_attrs = Vec::new();
+   let mut import_attrs = Vec::new();
+   let mut target_attrs = Vec::new();
    if let Some(import_id) = import_id &&
        let Some(hir::Node::Item(use_node)) = cx.tcx.hir().find_by_def_id(import_id)
    {
        let is_inline = inline::load_attrs(cx, import_id.to_def_id()).lists(sym::doc).get_word_attr(sym::inline).is_some();
        // Then we get all the various imports' attributes.
-       get_all_import_attributes(use_node, cx.tcx, item.owner_id.def_id, &mut extra_attrs, is_inline);
-       add_without_unwanted_attributes(&mut extra_attrs, inline::load_attrs(cx, def_id), is_inline);
+       get_all_import_attributes(use_node, cx.tcx, item.owner_id.def_id, &mut import_attrs, is_inline);
+       add_without_unwanted_attributes(&mut target_attrs, inline::load_attrs(cx, def_id), is_inline);
    } else {
        // We only keep the item's attributes.
-       extra_attrs.extend_from_slice(inline::load_attrs(cx, def_id));
+       target_attrs.extend_from_slice(inline::load_attrs(cx, def_id));
    }

-   let attrs = Attributes::from_ast(&extra_attrs);
-   let cfg = extra_attrs.cfg(cx.tcx, &cx.cache.hidden_cfg);
+   let import_parent = import_id.map(|import_id| cx.tcx.local_parent(import_id).to_def_id());
+   let (attrs, cfg) = merge_attrs(cx, import_parent, &target_attrs, Some(&import_attrs));

    let mut item =
        Item::from_def_id_and_attrs_and_parts(def_id, Some(name), kind, Box::new(attrs), cfg);

@@ -9,6 +9,6 @@ clap = "4.0.32"
env_logger = "0.7.1"

[dependencies.mdbook]
-version = "0.4.25"
+version = "0.4.28"
default-features = false
features = ["search"]

tests/rustdoc-ui/intra-doc/import-inline-merge.rs (new file, 16 lines)
@@ -0,0 +1,16 @@
+// Import for `A` is inlined and doc comments on the import and `A` itself are merged.
+// After the merge they still have correct parent scopes to resolve both `[A]` and `[B]`.
+
+// check-pass
+
+#![allow(rustdoc::private_intra_doc_links)]
+
+mod m {
+    /// [B]
+    pub struct A {}
+
+    pub struct B {}
+}
+
+/// [A]
+pub use m::A;

@@ -1,5 +1,7 @@
// check-pass
// edition:2021
+// [next] compile-flags: -Zlower-impl-trait-in-trait-to-assoc-ty
+// revisions: current next

#![feature(async_fn_in_trait)]
#![allow(incomplete_features)]

@@ -1,5 +1,5 @@
 warning: the feature `async_fn_in_trait` is incomplete and may not be safe to use and/or cause compiler crashes
-  --> $DIR/lifetime-mismatch.rs:3:12
+  --> $DIR/lifetime-mismatch.rs:5:12
   |
 LL | #![feature(async_fn_in_trait)]
   |            ^^^^^^^^^^^^^^^^^
@@ -8,7 +8,7 @@ LL | #![feature(async_fn_in_trait)]
   = note: `#[warn(incomplete_features)]` on by default

 error[E0195]: lifetime parameters or bounds on method `foo` do not match the trait declaration
-  --> $DIR/lifetime-mismatch.rs:12:17
+  --> $DIR/lifetime-mismatch.rs:14:17
   |
 LL | async fn foo<'a>(&self);
   | ---- lifetimes in impl do not match this method in trait

tests/ui/async-await/in-trait/lifetime-mismatch.next.stderr (new file, 21 lines)
@@ -0,0 +1,21 @@
+warning: the feature `async_fn_in_trait` is incomplete and may not be safe to use and/or cause compiler crashes
+  --> $DIR/lifetime-mismatch.rs:5:12
+   |
+LL | #![feature(async_fn_in_trait)]
+   |            ^^^^^^^^^^^^^^^^^
+   |
+   = note: see issue #91611 <https://github.com/rust-lang/rust/issues/91611> for more information
+   = note: `#[warn(incomplete_features)]` on by default
+
+error[E0195]: lifetime parameters or bounds on method `foo` do not match the trait declaration
+  --> $DIR/lifetime-mismatch.rs:14:17
+   |
+LL | async fn foo<'a>(&self);
+   | ---- lifetimes in impl do not match this method in trait
+...
+LL | async fn foo(&self) {}
+   | ^ lifetimes do not match method in trait
+
+error: aborting due to previous error; 1 warning emitted
+
+For more information about this error, try `rustc --explain E0195`.

@@ -1,4 +1,6 @@
// edition:2021
+// [next] compile-flags: -Zlower-impl-trait-in-trait-to-assoc-ty
+// revisions: current next

#![feature(async_fn_in_trait)]
//~^ WARN the feature `async_fn_in_trait` is incomplete and may not be safe to use and/or cause compiler crashes

@@ -0,0 +1,11 @@
+#![feature(generic_const_exprs)]
+//~^ WARN the feature `generic_const_exprs` is incomplete
+
+trait B {
+    type U<T>;
+}
+
+fn f<T: B<U<1i32> = ()>>() {}
+//~^ ERROR constant provided when a type was expected
+
+fn main() {}

@@ -0,0 +1,18 @@
+warning: the feature `generic_const_exprs` is incomplete and may not be safe to use and/or cause compiler crashes
+  --> $DIR/mismatched-gat-subst-kind.rs:1:12
+   |
+LL | #![feature(generic_const_exprs)]
+   |            ^^^^^^^^^^^^^^^^^^^
+   |
+   = note: see issue #76560 <https://github.com/rust-lang/rust/issues/76560> for more information
+   = note: `#[warn(incomplete_features)]` on by default
+
+error[E0747]: constant provided when a type was expected
+  --> $DIR/mismatched-gat-subst-kind.rs:8:13
+   |
+LL | fn f<T: B<U<1i32> = ()>>() {}
+   | ^^^^
+
+error: aborting due to previous error; 1 warning emitted
+
+For more information about this error, try `rustc --explain E0747`.

@@ -1,5 +1,7 @@
// check-pass
// edition:2021
+// [next] compile-flags: -Zlower-impl-trait-in-trait-to-assoc-ty
+// revisions: current next

#![feature(async_fn_in_trait, return_position_impl_trait_in_trait)]
#![allow(incomplete_features)]

@@ -1,4 +1,6 @@
// check-pass
+// [next] compile-flags: -Zlower-impl-trait-in-trait-to-assoc-ty
+// revisions: current next

#![feature(return_position_impl_trait_in_trait)]
#![allow(incomplete_features)]

@@ -1,4 +1,6 @@
// check-pass
+// [next] compile-flags: -Zlower-impl-trait-in-trait-to-assoc-ty
+// revisions: current next

#![feature(return_position_impl_trait_in_trait)]
#![allow(incomplete_features)]

@@ -1,5 +1,5 @@
 error[E0049]: method `bar` has 0 type parameters but its trait declaration has 1 type parameter
-  --> $DIR/trait-more-generics-than-impl.rs:11:11
+  --> $DIR/trait-more-generics-than-impl.rs:14:11
   |
 LL | fn bar<T>() -> impl Sized;
   | - expected 1 type parameter

@@ -0,0 +1,12 @@
+error[E0049]: method `bar` has 0 type parameters but its trait declaration has 1 type parameter
+  --> $DIR/trait-more-generics-than-impl.rs:14:11
+   |
+LL | fn bar<T>() -> impl Sized;
+   | - expected 1 type parameter
+...
+LL | fn bar() -> impl Sized {}
+   | ^ found 0 type parameters
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0049`.

@@ -1,3 +1,6 @@
+// [next] compile-flags: -Zlower-impl-trait-in-trait-to-assoc-ty
+// revisions: current next
+
#![feature(return_position_impl_trait_in_trait)]
#![allow(incomplete_features)]

@@ -1,5 +1,7 @@
// check-pass
// edition: 2021
+// [next] compile-flags: -Zlower-impl-trait-in-trait-to-assoc-ty
+// revisions: current next

#![feature(return_position_impl_trait_in_trait)]
#![allow(incomplete_features)]

@@ -1,5 +1,6 @@
// run-pass
// revisions: default mir-opt
+//[default] compile-flags: -Zinline-mir=no
//[mir-opt] compile-flags: -Zmir-opt-level=4

macro_rules! caller_location_from_macro {

@@ -9,13 +10,13 @@ macro_rules! caller_location_from_macro {
fn main() {
    let loc = core::panic::Location::caller();
    assert_eq!(loc.file(), file!());
-   assert_eq!(loc.line(), 10);
+   assert_eq!(loc.line(), 11);
    assert_eq!(loc.column(), 15);

    // `Location::caller()` in a macro should behave similarly to `file!` and `line!`,
    // i.e. point to where the macro was invoked, instead of the macro itself.
    let loc2 = caller_location_from_macro!();
    assert_eq!(loc2.file(), file!());
-   assert_eq!(loc2.line(), 17);
+   assert_eq!(loc2.line(), 18);
    assert_eq!(loc2.column(), 16);
}

tests/ui/rfc-2091-track-caller/mir-inlined-macro.rs (new file, 23 lines)
@@ -0,0 +1,23 @@
+// run-pass
+// revisions: default mir-opt
+//[default] compile-flags: -Zinline-mir=no
+//[mir-opt] compile-flags: -Zmir-opt-level=4
+
+use std::panic::Location;
+
+macro_rules! f {
+    () => {
+        Location::caller()
+    };
+}
+
+#[inline(always)]
+fn g() -> &'static Location<'static> {
+    f!()
+}
+
+fn main() {
+    let loc = g();
+    assert_eq!(loc.line(), 16);
+    assert_eq!(loc.column(), 5);
+}