Auto merge of #138350 - Kobzol:rollup-4kj94rq, r=Kobzol

Rollup of 10 pull requests

Successful merges:

 - #135987 (Clarify iterator by_ref docs)
 - #137967 ([AIX] Fix hangs during testing)
 - #138063 (Improve `-Zunpretty=hir` for parsed attrs)
 - #138147 (Add maintainers for powerpc64le-unknown-linux-gnu)
 - #138288 (Document -Z crate-attr)
 - #138300 (add tracking issue for unqualified_local_imports)
 - #138307 (Allow specifying glob patterns for try jobs)
 - #138315 (use next_back() instead of last() on DoubleEndedIterator)
 - #138330 (Remove unnecessary `[lints.rust]` sections.)
 - #138335 (Fix post-merge workflow)

r? `@ghost`
`@rustbot` modify labels: rollup
This commit is contained in:
bors 2025-03-11 15:08:43 +00:00
commit f2d69d5a7c
51 changed files with 643 additions and 316 deletions

View file

@ -36,7 +36,7 @@ jobs:
cd src/ci/citool
echo "Post-merge analysis result" > output.log
cargo run --release post-merge-analysis ${PARENT_COMMIT} ${{ github.sha }} >> output.log
cargo run --release post-merge-report ${PARENT_COMMIT} ${{ github.sha }} >> output.log
cat output.log
gh pr comment ${HEAD_PR} -F output.log

View file

@ -35,13 +35,17 @@ pub trait HashStableContext: rustc_ast::HashStableContext + rustc_abi::HashStabl
/// like [`Span`]s and empty tuples, are gracefully skipped so they don't clutter the
/// representation much.
pub trait PrintAttribute {
fn print_something(&self) -> bool;
/// Whether or not this will render as something meaningful, or if it's skipped
/// (which will force the containing struct to also skip printing a comma
/// and the field name).
fn should_render(&self) -> bool;
fn print_attribute(&self, p: &mut Printer);
}
impl<T: PrintAttribute> PrintAttribute for &T {
fn print_something(&self) -> bool {
T::print_something(self)
fn should_render(&self) -> bool {
T::should_render(self)
}
fn print_attribute(&self, p: &mut Printer) {
@ -49,9 +53,10 @@ impl<T: PrintAttribute> PrintAttribute for &T {
}
}
impl<T: PrintAttribute> PrintAttribute for Option<T> {
fn print_something(&self) -> bool {
self.as_ref().is_some_and(|x| x.print_something())
fn should_render(&self) -> bool {
self.as_ref().is_some_and(|x| x.should_render())
}
fn print_attribute(&self, p: &mut Printer) {
if let Some(i) = self {
T::print_attribute(i, p)
@ -59,9 +64,10 @@ impl<T: PrintAttribute> PrintAttribute for Option<T> {
}
}
impl<T: PrintAttribute> PrintAttribute for ThinVec<T> {
fn print_something(&self) -> bool {
self.is_empty() || self[0].print_something()
fn should_render(&self) -> bool {
self.is_empty() || self[0].should_render()
}
fn print_attribute(&self, p: &mut Printer) {
let mut last_printed = false;
p.word("[");
@ -70,7 +76,7 @@ impl<T: PrintAttribute> PrintAttribute for ThinVec<T> {
p.word_space(",");
}
i.print_attribute(p);
last_printed = i.print_something();
last_printed = i.should_render();
}
p.word("]");
}
@ -78,7 +84,7 @@ impl<T: PrintAttribute> PrintAttribute for ThinVec<T> {
macro_rules! print_skip {
($($t: ty),* $(,)?) => {$(
impl PrintAttribute for $t {
fn print_something(&self) -> bool { false }
fn should_render(&self) -> bool { false }
fn print_attribute(&self, _: &mut Printer) { }
})*
};
@ -87,7 +93,7 @@ macro_rules! print_skip {
macro_rules! print_disp {
($($t: ty),* $(,)?) => {$(
impl PrintAttribute for $t {
fn print_something(&self) -> bool { true }
fn should_render(&self) -> bool { true }
fn print_attribute(&self, p: &mut Printer) {
p.word(format!("{}", self));
}
@ -97,7 +103,7 @@ macro_rules! print_disp {
macro_rules! print_debug {
($($t: ty),* $(,)?) => {$(
impl PrintAttribute for $t {
fn print_something(&self) -> bool { true }
fn should_render(&self) -> bool { true }
fn print_attribute(&self, p: &mut Printer) {
p.word(format!("{:?}", self));
}
@ -106,37 +112,39 @@ macro_rules! print_debug {
}
macro_rules! print_tup {
(num_print_something $($ts: ident)*) => { 0 $(+ $ts.print_something() as usize)* };
(num_should_render $($ts: ident)*) => { 0 $(+ $ts.should_render() as usize)* };
() => {};
($t: ident $($ts: ident)*) => {
#[allow(non_snake_case, unused)]
impl<$t: PrintAttribute, $($ts: PrintAttribute),*> PrintAttribute for ($t, $($ts),*) {
fn print_something(&self) -> bool {
fn should_render(&self) -> bool {
let ($t, $($ts),*) = self;
print_tup!(num_print_something $t $($ts)*) != 0
print_tup!(num_should_render $t $($ts)*) != 0
}
fn print_attribute(&self, p: &mut Printer) {
let ($t, $($ts),*) = self;
let parens = print_tup!(num_print_something $t $($ts)*) > 1;
let parens = print_tup!(num_should_render $t $($ts)*) > 1;
if parens {
p.word("(");
p.popen();
}
let mut printed_anything = $t.print_something();
let mut printed_anything = $t.should_render();
$t.print_attribute(p);
$(
if printed_anything && $ts.print_something() {
p.word_space(",");
if $ts.should_render() {
if printed_anything {
p.word_space(",");
}
printed_anything = true;
}
$ts.print_attribute(p);
)*
if parens {
p.word(")");
p.pclose();
}
}
}
@ -147,8 +155,8 @@ macro_rules! print_tup {
print_tup!(A B C D E F G H);
print_skip!(Span, ());
print_disp!(Symbol, u16, bool, NonZero<u32>);
print_debug!(UintTy, IntTy, Align, AttrStyle, CommentKind, Transparency);
print_disp!(u16, bool, NonZero<u32>);
print_debug!(Symbol, UintTy, IntTy, Align, AttrStyle, CommentKind, Transparency);
/// Finds attributes in sequences of attributes by pattern matching.
///

View file

@ -3,10 +3,6 @@ name = "rustc_builtin_macros"
version = "0.0.0"
edition = "2024"
[lints.rust]
unexpected_cfgs = { level = "warn", check-cfg = ['cfg(llvm_enzyme)'] }
[lib]
doctest = false

View file

@ -240,7 +240,7 @@ declare_features! (
/// Added for testing unstable lints; perma-unstable.
(internal, test_unstable_lint, "1.60.0", None),
/// Helps with formatting for `group_imports = "StdExternalCrate"`.
(unstable, unqualified_local_imports, "1.83.0", None),
(unstable, unqualified_local_imports, "1.83.0", Some(138299)),
/// Use for stable + negative coherence and strict coherence depending on trait's
/// rustc_strict_coherence value.
(unstable, with_negative_coherence, "1.60.0", None),

View file

@ -1520,7 +1520,7 @@ fn generics_args_err_extend<'a>(
})
.collect();
if args.len() > 1
&& let Some(span) = args.into_iter().last()
&& let Some(span) = args.into_iter().next_back()
{
err.note(
"generic arguments are not allowed on both an enum and its variant's path \

View file

@ -118,9 +118,9 @@ impl<'a> State<'a> {
self.hardbreak()
}
hir::Attribute::Parsed(pa) => {
self.word("#[attr=\"");
self.word("#[attr = ");
pa.print_attribute(self);
self.word("\")]");
self.word("]");
self.hardbreak()
}
}

View file

@ -16,12 +16,14 @@ fn print_fields(name: &Ident, fields: &Fields) -> (TokenStream, TokenStream, Tok
let name = field.ident.as_ref().unwrap();
let string_name = name.to_string();
disps.push(quote! {
if __printed_anything && #name.print_something() {
__p.word_space(",");
if #name.should_render() {
if __printed_anything {
__p.word_space(",");
}
__p.word(#string_name);
__p.word_space(":");
__printed_anything = true;
}
__p.word(#string_name);
__p.word_space(":");
#name.print_attribute(__p);
});
field_names.push(name);
@ -31,10 +33,11 @@ fn print_fields(name: &Ident, fields: &Fields) -> (TokenStream, TokenStream, Tok
quote! { {#(#field_names),*} },
quote! {
__p.word(#string_name);
if true #(&& !#field_names.print_something())* {
if true #(&& !#field_names.should_render())* {
return;
}
__p.nbsp();
__p.word("{");
#(#disps)*
__p.word("}");
@ -48,8 +51,10 @@ fn print_fields(name: &Ident, fields: &Fields) -> (TokenStream, TokenStream, Tok
for idx in 0..fields_unnamed.unnamed.len() {
let name = format_ident!("f{idx}");
disps.push(quote! {
if __printed_anything && #name.print_something() {
__p.word_space(",");
if #name.should_render() {
if __printed_anything {
__p.word_space(",");
}
__printed_anything = true;
}
#name.print_attribute(__p);
@ -62,13 +67,13 @@ fn print_fields(name: &Ident, fields: &Fields) -> (TokenStream, TokenStream, Tok
quote! {
__p.word(#string_name);
if true #(&& !#field_names.print_something())* {
if true #(&& !#field_names.should_render())* {
return;
}
__p.word("(");
__p.popen();
#(#disps)*
__p.word(")");
__p.pclose();
},
quote! { true },
)
@ -138,7 +143,7 @@ pub(crate) fn print_attribute(input: Structure<'_>) -> TokenStream {
input.gen_impl(quote! {
#[allow(unused)]
gen impl PrintAttribute for @Self {
fn print_something(&self) -> bool { #printed }
fn should_render(&self) -> bool { #printed }
fn print_attribute(&self, __p: &mut rustc_ast_pretty::pp::Printer) { #code }
}
})

View file

@ -2107,7 +2107,7 @@ impl<'a> Parser<'a> {
ast::GenericBound::Trait(poly) => Some(poly),
_ => None,
})
.last()
.next_back()
{
err.span_suggestion_verbose(
poly.span.shrink_to_hi(),

View file

@ -33,6 +33,3 @@ nightly = [
"rustc_index/nightly",
"rustc_ast_ir/nightly",
]
[lints.rust]
unexpected_cfgs = { level = "warn", check-cfg = ['cfg(bootstrap)'] }

View file

@ -1825,10 +1825,19 @@ pub trait Iterator {
Inspect::new(self, f)
}
/// Borrows an iterator, rather than consuming it.
/// Creates a "by reference" adapter for this instance of `Iterator`.
///
/// This is useful to allow applying iterator adapters while still
/// retaining ownership of the original iterator.
/// Consuming method calls (direct or indirect calls to `next`)
/// on the "by reference" adapter will consume the original iterator,
/// but ownership-taking methods (those with a `self` parameter)
/// only take ownership of the "by reference" iterator.
///
/// This is useful for applying ownership-taking methods
/// (such as `take` in the example below)
/// without giving up ownership of the original iterator,
/// so you can use the original iterator afterwards.
///
/// Uses [impl<I: Iterator + ?Sized> Iterator for &mut I { type Item = I::Item; ...}](https://doc.rust-lang.org/nightly/std/iter/trait.Iterator.html#impl-Iterator-for-%26mut+I).
///
/// # Examples
///
@ -4024,6 +4033,9 @@ where
}
}
/// Implements `Iterator` for mutable references to iterators, such as those produced by [`Iterator::by_ref`].
///
/// This implementation passes all method calls on to the original iterator.
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: Iterator + ?Sized> Iterator for &mut I {
type Item = I::Item;

View file

@ -31,3 +31,14 @@ pub fn tsa<A: ToSocketAddrs>(a: A) -> Result<Vec<SocketAddr>, String> {
Err(e) => Err(e.to_string()),
}
}
/// Compares two socket addresses for equality while disregarding the IPv6
/// scope (zone) identifier, which may legitimately differ between machines.
///
/// For a pair of V6 addresses only the segments, flow info and port are
/// compared; every other combination falls back to plain `==`.
pub fn compare_ignore_zoneid(a: &SocketAddr, b: &SocketAddr) -> bool {
    if let (SocketAddr::V6(lhs), SocketAddr::V6(rhs)) = (a, b) {
        lhs.port() == rhs.port()
            && lhs.flowinfo() == rhs.flowinfo()
            && lhs.ip().segments() == rhs.ip().segments()
    } else {
        a == b
    }
}

View file

@ -1,4 +1,4 @@
use crate::net::test::{next_test_ip4, next_test_ip6};
use crate::net::test::{compare_ignore_zoneid, next_test_ip4, next_test_ip6};
use crate::net::*;
use crate::sync::mpsc::channel;
use crate::thread;
@ -46,7 +46,7 @@ fn socket_smoke_test_ip4() {
let (nread, src) = t!(server.recv_from(&mut buf));
assert_eq!(nread, 1);
assert_eq!(buf[0], 99);
assert_eq!(src, client_ip);
assert_eq!(compare_ignore_zoneid(&src, &client_ip), true);
rx2.recv().unwrap();
})
}
@ -78,7 +78,9 @@ fn udp_clone_smoke() {
let _t = thread::spawn(move || {
let mut buf = [0, 0];
assert_eq!(sock2.recv_from(&mut buf).unwrap(), (1, addr1));
let res = sock2.recv_from(&mut buf).unwrap();
assert_eq!(res.0, 1);
assert_eq!(compare_ignore_zoneid(&res.1, &addr1), true);
assert_eq!(buf[0], 1);
t!(sock2.send_to(&[2], &addr1));
});
@ -94,7 +96,9 @@ fn udp_clone_smoke() {
});
tx1.send(()).unwrap();
let mut buf = [0, 0];
assert_eq!(sock1.recv_from(&mut buf).unwrap(), (1, addr2));
let res = sock1.recv_from(&mut buf).unwrap();
assert_eq!(res.0, 1);
assert_eq!(compare_ignore_zoneid(&res.1, &addr2), true);
rx2.recv().unwrap();
})
}

View file

@ -107,6 +107,7 @@ dependencies = [
"build_helper",
"clap",
"csv",
"glob-match",
"insta",
"serde",
"serde_json",
@ -308,6 +309,12 @@ dependencies = [
"wasi",
]
[[package]]
name = "glob-match"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9985c9503b412198aa4197559e9a318524ebc4519c229bfa05a535828c950b9d"
[[package]]
name = "hashbrown"
version = "0.15.2"

View file

@ -7,6 +7,7 @@ edition = "2021"
anyhow = "1"
clap = { version = "4.5", features = ["derive"] }
csv = "1"
glob-match = "0.2"
serde = { version = "1", features = ["derive"] }
serde_yaml = "0.9"
serde_json = "1"

244
src/ci/citool/src/jobs.rs Normal file
View file

@ -0,0 +1,244 @@
#[cfg(test)]
mod tests;
use std::collections::BTreeMap;
use serde_yaml::Value;
use crate::GitHubContext;
/// Representation of a job loaded from the `src/ci/github-actions/jobs.yml` file.
#[derive(serde::Deserialize, Debug, Clone)]
pub struct Job {
    /// Name of the job, e.g. mingw-check
    pub name: String,
    /// GitHub runner on which the job should be executed
    pub os: String,
    /// Environment variables of the job, as raw YAML values.
    pub env: BTreeMap<String, Value>,
    /// Should the job be only executed on a specific channel?
    #[serde(default)]
    pub only_on_channel: Option<String>,
    /// Do not cancel the whole workflow if this job fails.
    #[serde(default)]
    pub continue_on_error: Option<bool>,
    /// Free additional disk space in the job, by removing unused packages.
    #[serde(default)]
    pub free_disk: Option<bool>,
}
impl Job {
    /// By default, the Docker image of a job is based on its name.
    /// However, it can be overridden by its IMAGE environment variable.
    ///
    /// Panics if an `IMAGE` entry is present but is not a YAML string.
    pub fn image(&self) -> String {
        match self.env.get("IMAGE") {
            Some(image) => image.as_str().expect("IMAGE value should be a string").to_string(),
            None => self.name.clone(),
        }
    }

    /// Whether the job is configured to run on an Ubuntu (Linux) runner.
    fn is_linux(&self) -> bool {
        self.os.contains("ubuntu")
    }
}
/// Shared sets of environment variables for the individual run types,
/// deserialized from the `envs` section of `jobs.yml`.
#[derive(serde::Deserialize, Debug)]
struct JobEnvironments {
    /// Base environment applied to pull request jobs.
    #[serde(rename = "pr")]
    pr_env: BTreeMap<String, Value>,
    /// Base environment applied to try jobs.
    #[serde(rename = "try")]
    try_env: BTreeMap<String, Value>,
    /// Base environment applied to auto (merge) jobs.
    #[serde(rename = "auto")]
    auto_env: BTreeMap<String, Value>,
}
/// The full set of CI jobs, split by the run type that triggers them.
#[derive(serde::Deserialize, Debug)]
pub struct JobDatabase {
    /// Jobs executed on pull request pushes.
    #[serde(rename = "pr")]
    pub pr_jobs: Vec<Job>,
    /// Jobs executed by default for `@bors try` builds.
    #[serde(rename = "try")]
    pub try_jobs: Vec<Job>,
    /// Jobs executed on merge attempts.
    #[serde(rename = "auto")]
    pub auto_jobs: Vec<Job>,
    /// Shared environments for the individual run types.
    envs: JobEnvironments,
}
impl JobDatabase {
    /// Find `auto` jobs that correspond to the passed `pattern`.
    /// Patterns are matched using the glob syntax.
    /// For example `dist-*` matches all jobs starting with `dist-`.
    fn find_auto_jobs_by_pattern(&self, pattern: &str) -> Vec<Job> {
        let mut matched = Vec::new();
        for job in &self.auto_jobs {
            if glob_match::glob_match(pattern, &job.name) {
                matched.push(job.clone());
            }
        }
        matched
    }
}
/// Parses the job database from its YAML text (the contents of
/// `src/ci/github-actions/jobs.yml`).
///
/// Returns an error if the YAML cannot be parsed or does not match the
/// [`JobDatabase`] schema.
pub fn load_job_db(db: &str) -> anyhow::Result<JobDatabase> {
    // Fixed: `from_str(&db)` took a needless `&&str` borrow of the `&str` parameter.
    let mut db: Value = serde_yaml::from_str(db)?;
    // We need to expand merge keys (<<), because serde_yaml can't deal with them
    // `apply_merge` only applies the merge once, so do it a few times to unwrap nested merges.
    db.apply_merge()?;
    db.apply_merge()?;
    let db: JobDatabase = serde_yaml::from_value(db)?;
    Ok(db)
}
/// Representation of a job outputted to a GitHub Actions workflow.
#[derive(serde::Serialize, Debug)]
struct GithubActionsJob {
    /// The main identifier of the job, used by CI scripts to determine what should be executed.
    name: String,
    /// Helper label displayed in GitHub Actions interface, containing the job name and a run type
    /// prefix (PR/try/auto).
    full_name: String,
    /// GitHub runner on which the job should be executed.
    os: String,
    /// Resolved environment of the job, serialized as JSON values.
    env: BTreeMap<String, serde_json::Value>,
    /// Do not cancel the whole workflow if this job fails (omitted from output when unset).
    #[serde(skip_serializing_if = "Option::is_none")]
    continue_on_error: Option<bool>,
    /// Free additional disk space in the job (omitted from output when unset).
    #[serde(skip_serializing_if = "Option::is_none")]
    free_disk: Option<bool>,
}
/// Skip CI jobs that are not supposed to be executed on the given `channel`.
fn skip_jobs(jobs: Vec<Job>, channel: &str) -> Vec<Job> {
    let mut kept = Vec::with_capacity(jobs.len());
    for job in jobs {
        // A job with no channel restriction runs everywhere; otherwise the
        // restriction must match the current channel exactly.
        let allowed = match &job.only_on_channel {
            None => true,
            Some(required) => required == channel,
        };
        if allowed {
            kept.push(job);
        }
    }
    kept
}
/// Type of workflow that is being executed on CI
#[derive(Debug)]
pub enum RunType {
/// Workflows that run after a push to a PR branch
PullRequest,
/// Try run started with @bors try
TryJob { job_patterns: Option<Vec<String>> },
/// Merge attempt workflow
AutoJob,
}
/// Maximum number of custom try jobs that can be requested in a single
/// `@bors try` request.
const MAX_TRY_JOBS_COUNT: usize = 20;

/// Computes the list of jobs (with their fully resolved environments) that
/// should be started for the given `run_type` on the given release `channel`.
///
/// For try runs with explicit patterns, every pattern is expanded against the
/// `auto` jobs; an error is returned when a pattern matches nothing or when the
/// expansion exceeds [`MAX_TRY_JOBS_COUNT`] jobs.
fn calculate_jobs(
    run_type: &RunType,
    db: &JobDatabase,
    channel: &str,
) -> anyhow::Result<Vec<GithubActionsJob>> {
    // Select the job list, display prefix and base environment for this run type.
    let (jobs, prefix, base_env) = match run_type {
        RunType::PullRequest => (db.pr_jobs.clone(), "PR", &db.envs.pr_env),
        RunType::TryJob { job_patterns } => {
            let jobs = if let Some(patterns) = job_patterns {
                let mut jobs: Vec<Job> = vec![];
                let mut unknown_patterns = vec![];
                for pattern in patterns {
                    let matched_jobs = db.find_auto_jobs_by_pattern(pattern);
                    if matched_jobs.is_empty() {
                        unknown_patterns.push(pattern.clone());
                    } else {
                        for job in matched_jobs {
                            // Deduplicate by name: several patterns may match the same job.
                            if !jobs.iter().any(|j| j.name == job.name) {
                                jobs.push(job);
                            }
                        }
                    }
                }
                // Unknown patterns are reported before the count limit is checked.
                if !unknown_patterns.is_empty() {
                    return Err(anyhow::anyhow!(
                        "Patterns `{}` did not match any auto jobs",
                        unknown_patterns.join(", ")
                    ));
                }
                if jobs.len() > MAX_TRY_JOBS_COUNT {
                    return Err(anyhow::anyhow!(
                        "It is only possible to schedule up to {MAX_TRY_JOBS_COUNT} custom jobs, received {} custom jobs expanded from {} pattern(s)",
                        jobs.len(),
                        patterns.len()
                    ));
                }
                jobs
            } else {
                // No explicit patterns: fall back to the default `try` job set.
                db.try_jobs.clone()
            };
            (jobs, "try", &db.envs.try_env)
        }
        RunType::AutoJob => (db.auto_jobs.clone(), "auto", &db.envs.auto_env),
    };
    let jobs = skip_jobs(jobs, channel);
    let jobs = jobs
        .into_iter()
        .map(|job| {
            // Job-specific env entries override the shared base environment.
            let mut env: BTreeMap<String, serde_json::Value> = crate::yaml_map_to_json(base_env);
            env.extend(crate::yaml_map_to_json(&job.env));
            let full_name = format!("{prefix} - {}", job.name);
            GithubActionsJob {
                name: job.name,
                full_name,
                os: job.os,
                env,
                continue_on_error: job.continue_on_error,
                free_disk: job.free_disk,
            }
        })
        .collect();
    Ok(jobs)
}
/// Determines the run type from the GitHub context, computes the corresponding
/// job matrix and prints it to stdout (with diagnostics on stderr) for
/// consumption by the GitHub Actions workflow.
pub fn calculate_job_matrix(
    db: JobDatabase,
    gh_ctx: GitHubContext,
    channel: &str,
) -> anyhow::Result<()> {
    let Some(run_type) = gh_ctx.get_run_type() else {
        return Err(anyhow::anyhow!(
            "Cannot determine the type of workflow that is being executed"
        ));
    };
    eprintln!("Run type: {run_type:?}");

    let jobs = calculate_jobs(&run_type, &db, channel)?;
    if jobs.is_empty() {
        return Err(anyhow::anyhow!("Computed job list is empty"));
    }

    // Collapse the run type into the short string understood by the workflow.
    let run_type = match run_type {
        RunType::PullRequest => "pr",
        RunType::TryJob { .. } => "try",
        RunType::AutoJob => "auto",
    };

    eprintln!("Output");
    eprintln!("jobs={jobs:?}");
    eprintln!("run_type={run_type}");
    println!("jobs={}", serde_json::to_string(&jobs)?);
    println!("run_type={run_type}");
    Ok(())
}
/// Looks up the job called `name` and checks that it is a Linux job (only
/// Linux jobs can be run locally).
///
/// On a failed lookup, the error message lists the available Linux jobs,
/// sorted by name.
pub fn find_linux_job<'a>(jobs: &'a [Job], name: &str) -> anyhow::Result<&'a Job> {
    match jobs.iter().find(|j| j.name == name) {
        Some(job) if job.is_linux() => Ok(job),
        Some(_) => Err(anyhow::anyhow!("Only Linux jobs can be executed locally")),
        None => {
            let mut available_jobs: Vec<String> =
                jobs.iter().filter(|j| j.is_linux()).map(|j| j.name.to_string()).collect();
            available_jobs.sort();
            Err(anyhow::anyhow!(
                "Job {name} not found. The following jobs are available:\n{}",
                available_jobs.join(", ")
            ))
        }
    }
}

View file

@ -0,0 +1,64 @@
use crate::jobs::{JobDatabase, load_job_db};
/// Checks that `find_auto_jobs_by_pattern` resolves prefix, suffix, infix and
/// combined glob patterns against the `auto` job list.
// NOTE(review): the YAML indentation below was reconstructed (the dump this was
// recovered from stripped leading whitespace); confirm it matches the original.
#[test]
fn lookup_job_pattern() {
    let db = load_job_db(
        r#"
envs:
  pr:
  try:
  auto:

pr:
try:
auto:
  - name: dist-a
    os: ubuntu
    env: {}
  - name: dist-a-alt
    os: ubuntu
    env: {}
  - name: dist-b
    os: ubuntu
    env: {}
  - name: dist-b-alt
    os: ubuntu
    env: {}
  - name: test-a
    os: ubuntu
    env: {}
  - name: test-a-alt
    os: ubuntu
    env: {}
  - name: test-i686
    os: ubuntu
    env: {}
  - name: dist-i686
    os: ubuntu
    env: {}
  - name: test-msvc-i686-1
    os: ubuntu
    env: {}
  - name: test-msvc-i686-2
    os: ubuntu
    env: {}
"#,
    )
    .unwrap();
    // Prefix glob
    check_pattern(&db, "dist-*", &["dist-a", "dist-a-alt", "dist-b", "dist-b-alt", "dist-i686"]);
    // Suffix glob
    check_pattern(&db, "*-alt", &["dist-a-alt", "dist-b-alt", "test-a-alt"]);
    // Prefix and suffix combined
    check_pattern(&db, "dist*-alt", &["dist-a-alt", "dist-b-alt"]);
    // Infix glob
    check_pattern(
        &db,
        "*i686*",
        &["test-i686", "dist-i686", "test-msvc-i686-1", "test-msvc-i686-2"],
    );
}
/// Asserts that `pattern` expands to exactly the job names in `expected`,
/// in the database's order.
#[track_caller]
fn check_pattern(db: &JobDatabase, pattern: &str, expected: &[&str]) {
    let mut names = Vec::new();
    for job in db.find_auto_jobs_by_pattern(pattern) {
        names.push(job.name);
    }
    assert_eq!(names, expected);
}

View file

@ -1,5 +1,6 @@
mod cpu_usage;
mod datadog;
mod jobs;
mod merge_report;
mod metrics;
mod utils;
@ -10,10 +11,12 @@ use std::process::Command;
use anyhow::Context;
use clap::Parser;
use jobs::JobDatabase;
use serde_yaml::Value;
use crate::cpu_usage::load_cpu_usage;
use crate::datadog::upload_datadog_metric;
use crate::jobs::RunType;
use crate::merge_report::post_merge_report;
use crate::metrics::postprocess_metrics;
use crate::utils::load_env_var;
@ -22,104 +25,6 @@ const CI_DIRECTORY: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/..");
const DOCKER_DIRECTORY: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/../docker");
const JOBS_YML_PATH: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/../github-actions/jobs.yml");
/// Representation of a job loaded from the `src/ci/github-actions/jobs.yml` file.
#[derive(serde::Deserialize, Debug, Clone)]
struct Job {
/// Name of the job, e.g. mingw-check
name: String,
/// GitHub runner on which the job should be executed
os: String,
env: BTreeMap<String, Value>,
/// Should the job be only executed on a specific channel?
#[serde(default)]
only_on_channel: Option<String>,
/// Rest of attributes that will be passed through to GitHub actions
#[serde(flatten)]
extra_keys: BTreeMap<String, Value>,
}
impl Job {
fn is_linux(&self) -> bool {
self.os.contains("ubuntu")
}
/// By default, the Docker image of a job is based on its name.
/// However, it can be overridden by its IMAGE environment variable.
fn image(&self) -> String {
self.env
.get("IMAGE")
.map(|v| v.as_str().expect("IMAGE value should be a string").to_string())
.unwrap_or_else(|| self.name.clone())
}
}
#[derive(serde::Deserialize, Debug)]
struct JobEnvironments {
#[serde(rename = "pr")]
pr_env: BTreeMap<String, Value>,
#[serde(rename = "try")]
try_env: BTreeMap<String, Value>,
#[serde(rename = "auto")]
auto_env: BTreeMap<String, Value>,
}
#[derive(serde::Deserialize, Debug)]
struct JobDatabase {
#[serde(rename = "pr")]
pr_jobs: Vec<Job>,
#[serde(rename = "try")]
try_jobs: Vec<Job>,
#[serde(rename = "auto")]
auto_jobs: Vec<Job>,
/// Shared environments for the individual run types.
envs: JobEnvironments,
}
impl JobDatabase {
fn find_auto_job_by_name(&self, name: &str) -> Option<Job> {
self.auto_jobs.iter().find(|j| j.name == name).cloned()
}
}
fn load_job_db(path: &Path) -> anyhow::Result<JobDatabase> {
let db = utils::read_to_string(path)?;
let mut db: Value = serde_yaml::from_str(&db)?;
// We need to expand merge keys (<<), because serde_yaml can't deal with them
// `apply_merge` only applies the merge once, so do it a few times to unwrap nested merges.
db.apply_merge()?;
db.apply_merge()?;
let db: JobDatabase = serde_yaml::from_value(db)?;
Ok(db)
}
/// Representation of a job outputted to a GitHub Actions workflow.
#[derive(serde::Serialize, Debug)]
struct GithubActionsJob {
/// The main identifier of the job, used by CI scripts to determine what should be executed.
name: String,
/// Helper label displayed in GitHub Actions interface, containing the job name and a run type
/// prefix (PR/try/auto).
full_name: String,
os: String,
env: BTreeMap<String, serde_json::Value>,
#[serde(flatten)]
extra_keys: BTreeMap<String, serde_json::Value>,
}
/// Type of workflow that is being executed on CI
#[derive(Debug)]
enum RunType {
/// Workflows that run after a push to a PR branch
PullRequest,
/// Try run started with @bors try
TryJob { custom_jobs: Option<Vec<String>> },
/// Merge attempt workflow
AutoJob,
}
struct GitHubContext {
event_name: String,
branch_ref: String,
@ -130,24 +35,31 @@ impl GitHubContext {
fn get_run_type(&self) -> Option<RunType> {
match (self.event_name.as_str(), self.branch_ref.as_str()) {
("pull_request", _) => Some(RunType::PullRequest),
("push", "refs/heads/try-perf") => Some(RunType::TryJob { custom_jobs: None }),
("push", "refs/heads/try-perf") => Some(RunType::TryJob { job_patterns: None }),
("push", "refs/heads/try" | "refs/heads/automation/bors/try") => {
let custom_jobs = self.get_custom_jobs();
let custom_jobs = if !custom_jobs.is_empty() { Some(custom_jobs) } else { None };
Some(RunType::TryJob { custom_jobs })
let patterns = self.get_try_job_patterns();
let patterns = if !patterns.is_empty() { Some(patterns) } else { None };
Some(RunType::TryJob { job_patterns: patterns })
}
("push", "refs/heads/auto") => Some(RunType::AutoJob),
_ => None,
}
}
/// Tries to parse names of specific CI jobs that should be executed in the form of
/// try-job: <job-name>
/// from the commit message of the passed GitHub context.
fn get_custom_jobs(&self) -> Vec<String> {
/// Tries to parse patterns of CI jobs that should be executed
/// from the commit message of the passed GitHub context
///
/// They can be specified in the form of
/// try-job: <job-pattern>
/// or
/// try-job: `<job-pattern>`
/// (to avoid GitHub rendering the glob patterns as Markdown)
fn get_try_job_patterns(&self) -> Vec<String> {
if let Some(ref msg) = self.commit_message {
msg.lines()
.filter_map(|line| line.trim().strip_prefix("try-job: "))
// Strip backticks if present
.map(|l| l.trim_matches('`'))
.map(|l| l.trim().to_string())
.collect()
} else {
@ -164,15 +76,6 @@ fn load_github_ctx() -> anyhow::Result<GitHubContext> {
Ok(GitHubContext { event_name, branch_ref: load_env_var("GITHUB_REF")?, commit_message })
}
/// Skip CI jobs that are not supposed to be executed on the given `channel`.
fn skip_jobs(jobs: Vec<Job>, channel: &str) -> Vec<Job> {
jobs.into_iter()
.filter(|job| {
job.only_on_channel.is_none() || job.only_on_channel.as_deref() == Some(channel)
})
.collect()
}
fn yaml_map_to_json(map: &BTreeMap<String, Value>) -> BTreeMap<String, serde_json::Value> {
map.into_iter()
.map(|(key, value)| {
@ -184,124 +87,13 @@ fn yaml_map_to_json(map: &BTreeMap<String, Value>) -> BTreeMap<String, serde_jso
.collect()
}
/// Maximum number of custom try jobs that can be requested in a single
/// `@bors try` request.
const MAX_TRY_JOBS_COUNT: usize = 20;
fn calculate_jobs(
run_type: &RunType,
db: &JobDatabase,
channel: &str,
) -> anyhow::Result<Vec<GithubActionsJob>> {
let (jobs, prefix, base_env) = match run_type {
RunType::PullRequest => (db.pr_jobs.clone(), "PR", &db.envs.pr_env),
RunType::TryJob { custom_jobs } => {
let jobs = if let Some(custom_jobs) = custom_jobs {
if custom_jobs.len() > MAX_TRY_JOBS_COUNT {
return Err(anyhow::anyhow!(
"It is only possible to schedule up to {MAX_TRY_JOBS_COUNT} custom jobs, received {} custom jobs",
custom_jobs.len()
));
}
let mut jobs = vec![];
let mut unknown_jobs = vec![];
for custom_job in custom_jobs {
if let Some(job) = db.find_auto_job_by_name(custom_job) {
jobs.push(job);
} else {
unknown_jobs.push(custom_job.clone());
}
}
if !unknown_jobs.is_empty() {
return Err(anyhow::anyhow!(
"Custom job(s) `{}` not found in auto jobs",
unknown_jobs.join(", ")
));
}
jobs
} else {
db.try_jobs.clone()
};
(jobs, "try", &db.envs.try_env)
}
RunType::AutoJob => (db.auto_jobs.clone(), "auto", &db.envs.auto_env),
};
let jobs = skip_jobs(jobs, channel);
let jobs = jobs
.into_iter()
.map(|job| {
let mut env: BTreeMap<String, serde_json::Value> = yaml_map_to_json(base_env);
env.extend(yaml_map_to_json(&job.env));
let full_name = format!("{prefix} - {}", job.name);
GithubActionsJob {
name: job.name,
full_name,
os: job.os,
env,
extra_keys: yaml_map_to_json(&job.extra_keys),
}
})
.collect();
Ok(jobs)
}
fn calculate_job_matrix(
db: JobDatabase,
gh_ctx: GitHubContext,
channel: &str,
) -> anyhow::Result<()> {
let run_type = gh_ctx.get_run_type().ok_or_else(|| {
anyhow::anyhow!("Cannot determine the type of workflow that is being executed")
})?;
eprintln!("Run type: {run_type:?}");
let jobs = calculate_jobs(&run_type, &db, channel)?;
if jobs.is_empty() {
return Err(anyhow::anyhow!("Computed job list is empty"));
}
let run_type = match run_type {
RunType::PullRequest => "pr",
RunType::TryJob { .. } => "try",
RunType::AutoJob => "auto",
};
eprintln!("Output");
eprintln!("jobs={jobs:?}");
eprintln!("run_type={run_type}");
println!("jobs={}", serde_json::to_string(&jobs)?);
println!("run_type={run_type}");
Ok(())
}
fn find_linux_job<'a>(jobs: &'a [Job], name: &str) -> anyhow::Result<&'a Job> {
let Some(job) = jobs.iter().find(|j| j.name == name) else {
let available_jobs: Vec<&Job> = jobs.iter().filter(|j| j.is_linux()).collect();
let mut available_jobs =
available_jobs.iter().map(|j| j.name.to_string()).collect::<Vec<_>>();
available_jobs.sort();
return Err(anyhow::anyhow!(
"Job {name} not found. The following jobs are available:\n{}",
available_jobs.join(", ")
));
};
if !job.is_linux() {
return Err(anyhow::anyhow!("Only Linux jobs can be executed locally"));
}
Ok(job)
}
fn run_workflow_locally(db: JobDatabase, job_type: JobType, name: String) -> anyhow::Result<()> {
let jobs = match job_type {
JobType::Auto => &db.auto_jobs,
JobType::PR => &db.pr_jobs,
};
let job = find_linux_job(jobs, &name).with_context(|| format!("Cannot find job {name}"))?;
let job =
jobs::find_linux_job(jobs, &name).with_context(|| format!("Cannot find job {name}"))?;
let mut custom_env: BTreeMap<String, String> = BTreeMap::new();
// Replicate src/ci/scripts/setup-environment.sh
@ -385,7 +177,7 @@ enum Args {
}
#[derive(clap::ValueEnum, Clone)]
enum JobType {
pub enum JobType {
/// Merge attempt ("auto") job
Auto,
/// Pull request job
@ -395,7 +187,10 @@ enum JobType {
fn main() -> anyhow::Result<()> {
let args = Args::parse();
let default_jobs_file = Path::new(JOBS_YML_PATH);
let load_db = |jobs_path| load_job_db(jobs_path).context("Cannot load jobs.yml");
let load_db = |jobs_path| {
let db = utils::read_to_string(jobs_path)?;
Ok::<_, anyhow::Error>(jobs::load_job_db(&db).context("Cannot load jobs.yml")?)
};
match args {
Args::CalculateJobMatrix { jobs_file } => {
@ -407,7 +202,7 @@ fn main() -> anyhow::Result<()> {
.trim()
.to_string();
calculate_job_matrix(load_db(jobs_path)?, gh_ctx, &channel)
jobs::calculate_job_matrix(load_db(jobs_path)?, gh_ctx, &channel)
.context("Failed to calculate job matrix")?;
}
Args::RunJobLocally { job_type, name } => {

View file

@ -4,7 +4,7 @@ use std::collections::HashMap;
use anyhow::Context;
use build_helper::metrics::{JsonRoot, TestOutcome};
use crate::JobDatabase;
use crate::jobs::JobDatabase;
use crate::metrics::get_test_suites;
type Sha = String;

View file

@ -133,29 +133,37 @@ There are several use-cases for try builds:
Again, a working compiler build is needed for this, which can be produced by
the [dist-x86_64-linux] CI job.
- Run a specific CI job (e.g. Windows tests) on a PR, to quickly test if it
passes the test suite executed by that job. You can select which CI jobs will
be executed in the try build by adding up to 10 lines containing `try-job:
<name of job>` to the PR description. All such specified jobs will be executed
in the try build once the `@bors try` command is used on the PR. If no try
jobs are specified in this way, the jobs defined in the `try` section of
[`jobs.yml`] will be executed by default.
passes the test suite executed by that job.
You can select which CI jobs will
be executed in the try build by adding lines containing `try-job:
<job pattern>` to the PR description. All such specified jobs will be executed
in the try build once the `@bors try` command is used on the PR. If no try
jobs are specified in this way, the jobs defined in the `try` section of
[`jobs.yml`] will be executed by default.
Each pattern can either be an exact name of a job or a glob pattern that matches multiple jobs,
for example `*msvc*` or `*-alt`. You can start at most 20 jobs in a single try build. When using
glob patterns, you might want to wrap them in backticks (`` ` ``) to avoid GitHub rendering
the pattern as Markdown.
> **Using `try-job` PR description directives**
>
> 1. Identify which set of try-jobs (max 10) you would like to exercise. You can
> 1. Identify which set of try-jobs you would like to exercise. You can
> find the name of the CI jobs in [`jobs.yml`].
>
> 2. Amend PR description to include (usually at the end of the PR description)
> e.g.
> 2. Amend PR description to include a set of patterns (usually at the end
> of the PR description), for example:
>
> ```text
> This PR fixes #123456.
>
> try-job: x86_64-msvc
> try-job: test-various
> try-job: `*-alt`
> ```
>
> Each `try-job` directive must be on its own line.
> Each `try-job` pattern must be on its own line.
>
> 3. Run the prescribed try jobs with `@bors try`. As aforementioned, this
> requires the user to either (1) have `try` permissions or (2) be delegated

View file

@ -72,6 +72,7 @@
- [powerpc-unknown-linux-gnuspe](platform-support/powerpc-unknown-linux-gnuspe.md)
- [powerpc-unknown-linux-muslspe](platform-support/powerpc-unknown-linux-muslspe.md)
- [powerpc64-ibm-aix](platform-support/aix.md)
- [powerpc64le-unknown-linux-gnu](platform-support/powerpc64le-unknown-linux-gnu.md)
- [powerpc64le-unknown-linux-musl](platform-support/powerpc64le-unknown-linux-musl.md)
- [riscv32e\*-unknown-none-elf](platform-support/riscv32e-unknown-none-elf.md)
- [riscv32i\*-unknown-none-elf](platform-support/riscv32-unknown-none-elf.md)

View file

@ -96,7 +96,7 @@ target | notes
[`loongarch64-unknown-linux-musl`](platform-support/loongarch-linux.md) | LoongArch64 Linux, LP64D ABI (kernel 5.19, musl 1.2.5)
`powerpc-unknown-linux-gnu` | PowerPC Linux (kernel 3.2, glibc 2.17)
`powerpc64-unknown-linux-gnu` | PPC64 Linux (kernel 3.2, glibc 2.17)
`powerpc64le-unknown-linux-gnu` | PPC64LE Linux (kernel 3.10, glibc 2.17)
[`powerpc64le-unknown-linux-gnu`](platform-support/powerpc64le-unknown-linux-gnu.md) | PPC64LE Linux (kernel 3.10, glibc 2.17)
[`powerpc64le-unknown-linux-musl`](platform-support/powerpc64le-unknown-linux-musl.md) | PPC64LE Linux (kernel 4.19, musl 1.2.3)
[`riscv64gc-unknown-linux-gnu`](platform-support/riscv64gc-unknown-linux-gnu.md) | RISC-V Linux (kernel 4.20, glibc 2.29)
[`riscv64gc-unknown-linux-musl`](platform-support/riscv64gc-unknown-linux-musl.md) | RISC-V Linux (kernel 4.20, musl 1.2.3)

View file

@ -0,0 +1,47 @@
# `powerpc64le-unknown-linux-gnu`
**Tier: 2**
Target for 64-bit little endian PowerPC Linux programs
## Target maintainers
- David Tenty, `daltenty@ibm.com`, https://github.com/daltenty
- Chris Cambly, `ccambly@ca.ibm.com`, https://github.com/gilamn5tr
## Requirements
Building the target itself requires a 64-bit little endian PowerPC compiler that is supported by `cc-rs`.
## Building the target
The target can be built by enabling it for a `rustc` build.
```toml
[build]
target = ["powerpc64le-unknown-linux-gnu"]
```
Make sure your C compiler is included in `$PATH`, then add it to the `config.toml`:
```toml
[target.powerpc64le-unknown-linux-gnu]
cc = "powerpc64le-linux-gnu-gcc"
cxx = "powerpc64le-linux-gnu-g++"
ar = "powerpc64le-linux-gnu-ar"
linker = "powerpc64le-linux-gnu-gcc"
```
## Building Rust programs
This target is distributed through `rustup`, and requires no special
configuration.
## Cross-compilation
This target can be cross-compiled from any host.
## Testing
This target can be tested as normal with `x.py` on a 64-bit little endian
PowerPC host or via QEMU emulation.

View file

@ -0,0 +1,16 @@
# `crate-attr`
The tracking issue for this feature is: [#138287](https://github.com/rust-lang/rust/issues/138287).
------------------------
The `-Z crate-attr` flag allows you to inject attributes into the crate root.
For example, `-Z crate-attr=crate_name="test"` acts as if `#![crate_name="test"]` were present before the first source line of the crate root.
To inject multiple attributes, pass `-Z crate-attr` multiple times.
Formally, the expansion behaves as follows:
1. The crate is parsed as if `-Z crate-attr` were not present.
2. The attributes in `-Z crate-attr` are parsed.
3. The attributes are injected at the top of the crate root.
4. Macro expansion is performed.

View file

@ -1,5 +1,6 @@
//@ ignore-windows-gnu: #128981
//@ ignore-android: FIXME(#10381)
//@ ignore-aix: FIXME(#137965)
//@ compile-flags:-g
// === GDB TESTS ===================================================================================

View file

@ -6,6 +6,6 @@ extern crate std;
//@ pretty-mode:hir
//@ pp-exact:hir-pretty-attr.pp
#[attr="Repr([ReprC, ReprPacked(Align(4 bytes)), ReprTransparent])")]
#[attr = Repr([ReprC, ReprPacked(Align(4 bytes)), ReprTransparent])]
struct Example {
}

View file

@ -1,5 +1,5 @@
#[repr(i32)]
//@ is "$.index[*][?(@.name=='Foo')].attrs" '["#[attr=\"Repr([ReprInt(SignedInt(I32))])\")]\n"]'
//@ is "$.index[*][?(@.name=='Foo')].attrs" '["#[attr = Repr([ReprInt(SignedInt(I32))])]\n"]'
pub enum Foo {
//@ is "$.index[*][?(@.name=='Struct')].inner.variant.discriminant" null
//@ count "$.index[*][?(@.name=='Struct')].inner.variant.kind.struct.fields[*]" 0

View file

@ -1,5 +1,5 @@
#[repr(u32)]
//@ is "$.index[*][?(@.name=='Foo')].attrs" '["#[attr=\"Repr([ReprInt(UnsignedInt(U32))])\")]\n"]'
//@ is "$.index[*][?(@.name=='Foo')].attrs" '["#[attr = Repr([ReprInt(UnsignedInt(U32))])]\n"]'
pub enum Foo {
//@ is "$.index[*][?(@.name=='Tuple')].inner.variant.discriminant" null
//@ count "$.index[*][?(@.name=='Tuple')].inner.variant.kind.tuple[*]" 0

View file

@ -0,0 +1,7 @@
// Ensure that `-Z crate-attr=cfg(FALSE)` can comment out the whole crate
//@ compile-flags: --crate-type=lib -Zcrate-attr=cfg(FALSE)
//@ check-pass
// NOTE: duplicate items are load-bearing
fn foo() {}
fn foo() {}

View file

@ -0,0 +1,5 @@
//@ check-pass
//@ compile-flags: -Zcrate-attr=/*hi-there*/feature(rustc_attrs)
#[rustc_dummy]
fn main() {}

View file

@ -0,0 +1,6 @@
// Ensure that `crate_name` and `crate_type` can be set through `-Z crate-attr`.
//@ check-pass
//@ compile-flags: -Zcrate-attr=crate_name="override"
fn main() {
assert_eq!(module_path!(), "r#override");
}

View file

@ -0,0 +1,3 @@
//@ check-pass
//@ compile-flags: -Zcrate-attr=crate_type="lib"
// notice the lack of `main` is load-bearing

View file

@ -0,0 +1,4 @@
// Show diagnostics for invalid tokens
//@ compile-flags: -Zcrate-attr=`%~@$#
//@ error-pattern:unknown start of token
fn main() {}

View file

@ -0,0 +1,20 @@
error: unknown start of token: `
--> <crate attribute>:1:1
|
LL | `%~@$#
| ^
|
help: Unicode character '`' (Grave Accent) looks like ''' (Single Quote), but it is not
|
LL - `%~@$#
LL + '%~@$#
|
error: expected identifier, found `%`
--> <crate attribute>:1:2
|
LL | `%~@$#
| ^ expected identifier
error: aborting due to 2 previous errors

View file

@ -0,0 +1,3 @@
//@ compile-flags: '-Zcrate-attr=feature(yeet_expr)]fn main(){}#[inline'
//@ error-pattern:unexpected closing delimiter
fn foo() {}

View file

@ -0,0 +1,8 @@
error: unexpected closing delimiter: `]`
--> <crate attribute>:1:19
|
LL | feature(yeet_expr)]fn main(){}#[inline
| ^ unexpected closing delimiter
error: aborting due to 1 previous error

View file

@ -0,0 +1,4 @@
//@ compile-flags: -Zcrate-attr=#![feature(foo)]
//@ error-pattern:expected identifier
fn main() {}

View file

@ -0,0 +1,8 @@
error: expected identifier, found `#`
--> <crate attribute>:1:1
|
LL | #![feature(foo)]
| ^ expected identifier
error: aborting due to 1 previous error

View file

@ -0,0 +1,3 @@
//@ compile-flags: -Zcrate-attr=feature(foo),feature(bar)
//@ error-pattern:invalid crate attr
fn main() {}

View file

@ -0,0 +1,8 @@
error: invalid crate attribute
--> <crate attribute>:1:1
|
LL | feature(foo),feature(bar)
| ^^^^^^^^^^^^^
error: aborting due to 1 previous error

View file

@ -0,0 +1,9 @@
// Make sure that existing root attributes are still respected even when `-Zcrate-attr` is present.
//@ run-pass
//@ compile-flags: -Zcrate-attr=feature(rustc_attrs)
#![crate_name = "override"]
#[rustc_dummy]
fn main() {
assert_eq!(module_path!(), "r#override");
}

View file

@ -0,0 +1,6 @@
#!/usr/bin/env -S cargo +nightly -Zscript
// Make sure that shebangs are still allowed even when `-Zcrate-attr` is present.
//@ check-pass
//@ compile-flags: -Zcrate-attr=feature(rustc_attrs)
#[rustc_dummy]
fn main() {}

View file

@ -0,0 +1,4 @@
// Show diagnostics for unbalanced parens.
//@ compile-flags: -Zcrate-attr=(
//@ error-pattern:unclosed delimiter
fn main() {}

View file

@ -0,0 +1,10 @@
error: this file contains an unclosed delimiter
--> <crate attribute>:1:2
|
LL | (
| -^
| |
| unclosed delimiter
error: aborting due to 1 previous error

View file

@ -2,6 +2,9 @@
// on 32bit and 16bit platforms it is plausible that the maximum allocation size will succeed
// FIXME (#135952) In some cases on AArch64 Linux the diagnostic does not trigger
//@ ignore-aarch64-unknown-linux-gnu
// AIX will allow the allocation to go through, but the process gets SIGKILL when
// zero-initializing the overcommitted page.
//@ ignore-aix
const FOO: () = {
// 128 TiB, unlikely anyone has that much RAM

View file

@ -1,11 +1,11 @@
error[E0080]: evaluation of constant value failed
--> $DIR/large_const_alloc.rs:8:13
--> $DIR/large_const_alloc.rs:11:13
|
LL | let x = [0_u8; (1 << 47) - 1];
| ^^^^^^^^^^^^^^^^^^^^^ tried to allocate more memory than available to compiler
error[E0080]: could not evaluate static initializer
--> $DIR/large_const_alloc.rs:13:13
--> $DIR/large_const_alloc.rs:16:13
|
LL | let x = [0_u8; (1 << 47) - 1];
| ^^^^^^^^^^^^^^^^^^^^^ tried to allocate more memory than available to compiler

View file

@ -5,6 +5,9 @@
//@ only-64bit
// FIXME (#135952) In some cases on AArch64 Linux the diagnostic does not trigger
//@ ignore-aarch64-unknown-linux-gnu
// AIX will allow the allocation to go through, but the process gets SIGKILL when
// zero-initializing the overcommitted page.
//@ ignore-aix
pub struct Data([u8; (1 << 47) - 1]);
const _: &'static Data = &Data([0; (1 << 47) - 1]);

View file

@ -1,5 +1,5 @@
error[E0080]: evaluation of constant value failed
--> $DIR/promoted_running_out_of_memory_issue-130687.rs:10:32
--> $DIR/promoted_running_out_of_memory_issue-130687.rs:13:32
|
LL | const _: &'static Data = &Data([0; (1 << 47) - 1]);
| ^^^^^^^^^^^^^^^^^^ tried to allocate more memory than available to compiler

View file

@ -5,6 +5,7 @@ LL | #![allow(unqualified_local_imports)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^
|
= note: the `unqualified_local_imports` lint is unstable
= note: see issue #138299 <https://github.com/rust-lang/rust/issues/138299> for more information
= help: add `#![feature(unqualified_local_imports)]` to the crate attributes to enable
= note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
= note: `#[warn(unknown_lints)]` on by default

View file

@ -1,8 +1,6 @@
//@ compile-flags: -Zunpretty=hir
//@ check-pass
// FIXME(jdonszelmann): the pretty printing output for deprecated (and possibly more attrs) is
// slightly broken.
#[deprecated]
pub struct PlainDeprecated;

View file

@ -5,24 +5,21 @@ extern crate std;
//@ compile-flags: -Zunpretty=hir
//@ check-pass
// FIXME(jdonszelmann): the pretty printing output for deprecated (and possibly more attrs) is
// slightly broken.
#[attr="Deprecation{deprecation: Deprecation{since: Unspecifiednote:
suggestion: }span: }")]
#[attr = Deprecation {deprecation: Deprecation {since: Unspecified}}]
struct PlainDeprecated;
#[attr="Deprecation{deprecation: Deprecation{since: Unspecifiednote:
here's why this is deprecatedsuggestion: }span: }")]
#[attr = Deprecation {deprecation: Deprecation {since: Unspecified, note:
"here's why this is deprecated"}}]
struct DirectNote;
#[attr="Deprecation{deprecation: Deprecation{since: Unspecifiednote:
here's why this is deprecatedsuggestion: }span: }")]
#[attr = Deprecation {deprecation: Deprecation {since: Unspecified, note:
"here's why this is deprecated"}}]
struct ExplicitNote;
#[attr="Deprecation{deprecation: Deprecation{since: NonStandard(1.2.3)note:
here's why this is deprecatedsuggestion: }span: }")]
#[attr = Deprecation {deprecation: Deprecation {since: NonStandard("1.2.3"),
note: "here's why this is deprecated"}}]
struct SinceAndNote;
#[attr="Deprecation{deprecation: Deprecation{since: NonStandard(1.2.3)note:
here's why this is deprecatedsuggestion: }span: }")]
#[attr = Deprecation {deprecation: Deprecation {since: NonStandard("1.2.3"),
note: "here's why this is deprecated"}}]
struct FlippedOrder;