diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 0cfbb4e77c4..a91598586b9 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -182,6 +182,13 @@ jobs:
       - name: show the current environment
        run: src/ci/scripts/dump-environment.sh
 
+      # Pre-build citool before the following step uninstalls rustup
+      # Build is into the build directory, to avoid modifying sources
+      - name: build citool
+        run: |
+          cd src/ci/citool
+          CARGO_TARGET_DIR=../../../build/citool cargo build
+
       - name: run the build
         # Redirect stderr to stdout to avoid reordering the two streams in the GHA logs.
         run: src/ci/scripts/run-build-from-ci.sh 2>&1
@@ -218,6 +225,16 @@ jobs:
         #   erroring about invalid credentials instead.
         if: github.event_name == 'push' || env.DEPLOY == '1' || env.DEPLOY_ALT == '1'
 
+      - name: postprocess metrics into the summary
+        run: |
+          if [ -f build/metrics.json ]; then
+            ./build/citool/debug/citool postprocess-metrics build/metrics.json ${GITHUB_STEP_SUMMARY}
+          elif [ -f obj/build/metrics.json ]; then
+            ./build/citool/debug/citool postprocess-metrics obj/build/metrics.json ${GITHUB_STEP_SUMMARY}
+          else
+            echo "No metrics.json found"
+          fi
+
       - name: upload job metrics to DataDog
         if: needs.calculate_matrix.outputs.run_type != 'pr'
         env:
diff --git a/src/ci/citool/Cargo.lock b/src/ci/citool/Cargo.lock
index 39b6b44da64..e5be2d42472 100644
--- a/src/ci/citool/Cargo.lock
+++ b/src/ci/citool/Cargo.lock
@@ -58,11 +58,20 @@ version = "1.0.95"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04"
 
+[[package]]
+name = "build_helper"
+version = "0.1.0"
+dependencies = [
+ "serde",
+ "serde_derive",
+]
+
 [[package]]
 name = "citool"
 version = "0.1.0"
 dependencies = [
  "anyhow",
+ "build_helper",
  "clap",
  "insta",
  "serde",
diff --git a/src/ci/citool/Cargo.toml b/src/ci/citool/Cargo.toml
index e77c67c7147..11172832cb8 100644
--- a/src/ci/citool/Cargo.toml
+++ b/src/ci/citool/Cargo.toml
@@ -10,6 +10,8 @@ serde = { version = "1", features = ["derive"] }
 serde_yaml = "0.9"
 serde_json = "1"
 
+build_helper = { path = "../../build_helper" }
+
 [dev-dependencies]
 insta = "1"
 
diff --git a/src/ci/citool/src/main.rs b/src/ci/citool/src/main.rs
index ad9cc8b82a6..cef92e998da 100644
--- a/src/ci/citool/src/main.rs
+++ b/src/ci/citool/src/main.rs
@@ -1,3 +1,5 @@
+mod metrics;
+
 use std::collections::BTreeMap;
 use std::path::{Path, PathBuf};
 use std::process::Command;
@@ -6,6 +8,8 @@ use anyhow::Context;
 use clap::Parser;
 use serde_yaml::Value;
 
+use crate::metrics::postprocess_metrics;
+
 const CI_DIRECTORY: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/..");
 const DOCKER_DIRECTORY: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/../docker");
 const JOBS_YML_PATH: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/../github-actions/jobs.yml");
@@ -338,6 +342,14 @@ enum Args {
         #[clap(long = "type", default_value = "auto")]
         job_type: JobType,
     },
+    /// Postprocess the metrics.json file generated by bootstrap.
+    PostprocessMetrics {
+        /// Path to the metrics.json file
+        metrics_path: PathBuf,
+        /// Path to a file where the postprocessed metrics summary will be stored.
+        /// Usually, this will be GITHUB_STEP_SUMMARY on CI.
+        summary_path: PathBuf,
+    },
 }
 
 #[derive(clap::ValueEnum, Clone)]
@@ -369,6 +381,9 @@ fn main() -> anyhow::Result<()> {
         Args::RunJobLocally { job_type, name } => {
             run_workflow_locally(load_db(default_jobs_file)?, job_type, name)?
         }
+        Args::PostprocessMetrics { metrics_path, summary_path } => {
+            postprocess_metrics(&metrics_path, &summary_path)?;
+        }
     }
 
     Ok(())
diff --git a/src/ci/citool/src/metrics.rs b/src/ci/citool/src/metrics.rs
new file mode 100644
index 00000000000..9a1c7c4d910
--- /dev/null
+++ b/src/ci/citool/src/metrics.rs
@@ -0,0 +1,138 @@
+use std::collections::BTreeMap;
+use std::fs::File;
+use std::io::Write;
+use std::path::Path;
+
+use anyhow::Context;
+use build_helper::metrics::{JsonNode, JsonRoot, TestOutcome, TestSuite, TestSuiteMetadata};
+
+pub fn postprocess_metrics(metrics_path: &Path, summary_path: &Path) -> anyhow::Result<()> {
+    let metrics = load_metrics(metrics_path)?;
+
+    let mut file = File::options()
+        .append(true)
+        .create(true)
+        .open(summary_path)
+        .with_context(|| format!("Cannot open summary file at {summary_path:?}"))?;
+
+    record_test_suites(&metrics, &mut file)?;
+
+    Ok(())
+}
+
+fn record_test_suites(metrics: &JsonRoot, file: &mut File) -> anyhow::Result<()> {
+    let suites = get_test_suites(&metrics);
+
+    if !suites.is_empty() {
+        let aggregated = aggregate_test_suites(&suites);
+        let table = render_table(aggregated);
+        writeln!(file, "\n# Test results\n")?;
+        writeln!(file, "{table}")?;
+    } else {
+        eprintln!("No test suites found in metrics");
+    }
+
+    Ok(())
+}
+
+fn render_table(suites: BTreeMap<String, TestSuiteRecord>) -> String {
+    use std::fmt::Write;
+
+    let mut table = "| Test suite | Passed ✅ | Ignored 🚫 | Failed ❌ |\n".to_string();
+    writeln!(table, "|:------|------:|------:|------:|").unwrap();
+
+    fn write_row(
+        buffer: &mut String,
+        name: &str,
+        record: &TestSuiteRecord,
+        surround: &str,
+    ) -> std::fmt::Result {
+        let TestSuiteRecord { passed, ignored, failed } = record;
+        let total = (record.passed + record.ignored + record.failed) as f64;
+        let passed_pct = ((*passed as f64) / total) * 100.0;
+        let ignored_pct = ((*ignored as f64) / total) * 100.0;
+        let failed_pct = ((*failed as f64) / total) * 100.0;
+
+        write!(buffer, "| {surround}{name}{surround} |")?;
+        write!(buffer, " {surround}{passed} ({passed_pct:.0}%){surround} |")?;
+        write!(buffer, " {surround}{ignored} ({ignored_pct:.0}%){surround} |")?;
+        writeln!(buffer, " {surround}{failed} ({failed_pct:.0}%){surround} |")?;
+
+        Ok(())
+    }
+
+    let mut total = TestSuiteRecord::default();
+    for (name, record) in suites {
+        write_row(&mut table, &name, &record, "").unwrap();
+        total.passed += record.passed;
+        total.ignored += record.ignored;
+        total.failed += record.failed;
+    }
+    write_row(&mut table, "Total", &total, "**").unwrap();
+    table
+}
+
+#[derive(Default)]
+struct TestSuiteRecord {
+    passed: u64,
+    ignored: u64,
+    failed: u64,
+}
+
+fn aggregate_test_suites(suites: &[&TestSuite]) -> BTreeMap<String, TestSuiteRecord> {
+    let mut records: BTreeMap<String, TestSuiteRecord> = BTreeMap::new();
+    for suite in suites {
+        let name = match &suite.metadata {
+            TestSuiteMetadata::CargoPackage { crates, stage, .. } => {
+                format!("{} (stage {stage})", crates.join(", "))
+            }
+            TestSuiteMetadata::Compiletest { suite, stage, .. } => {
+                format!("{suite} (stage {stage})")
+            }
+        };
+        let record = records.entry(name).or_default();
+        for test in &suite.tests {
+            match test.outcome {
+                TestOutcome::Passed => {
+                    record.passed += 1;
+                }
+                TestOutcome::Failed => {
+                    record.failed += 1;
+                }
+                TestOutcome::Ignored { .. } => {
+                    record.ignored += 1;
+                }
+            }
+        }
+    }
+    records
+}
+
+fn get_test_suites(metrics: &JsonRoot) -> Vec<&TestSuite> {
+    fn visit_test_suites<'a>(nodes: &'a [JsonNode], suites: &mut Vec<&'a TestSuite>) {
+        for node in nodes {
+            match node {
+                JsonNode::RustbuildStep { children, .. } => {
+                    visit_test_suites(&children, suites);
+                }
+                JsonNode::TestSuite(suite) => {
+                    suites.push(&suite);
+                }
+            }
+        }
+    }
+
+    let mut suites = vec![];
+    for invocation in &metrics.invocations {
+        visit_test_suites(&invocation.children, &mut suites);
+    }
+    suites
+}
+
+fn load_metrics(path: &Path) -> anyhow::Result<JsonRoot> {
+    let metrics = std::fs::read_to_string(path)
+        .with_context(|| format!("Cannot read JSON metrics from {path:?}"))?;
+    let metrics: JsonRoot = serde_json::from_str(&metrics)
+        .with_context(|| format!("Cannot deserialize JSON metrics from {path:?}"))?;
+    Ok(metrics)
+}