diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index ffcdc40de3a..aaae67c28bc 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -252,7 +252,12 @@ jobs:
             exit 0
           fi
 
+          # Get closest bors merge commit
+          PARENT_COMMIT=`git rev-list --author='bors ' -n1 --first-parent HEAD^1`
+
           ./build/citool/debug/citool postprocess-metrics \
+            --job-name ${CI_JOB_NAME} \
+            --parent ${PARENT_COMMIT} \
             ${METRICS} >> ${GITHUB_STEP_SUMMARY}
 
       - name: upload job metrics to DataDog
diff --git a/src/ci/citool/src/main.rs b/src/ci/citool/src/main.rs
index 5f1932854b5..fb0639367bd 100644
--- a/src/ci/citool/src/main.rs
+++ b/src/ci/citool/src/main.rs
@@ -5,7 +5,7 @@ mod jobs;
 mod metrics;
 mod utils;
 
-use std::collections::BTreeMap;
+use std::collections::{BTreeMap, HashMap};
 use std::path::{Path, PathBuf};
 use std::process::Command;
 
@@ -18,7 +18,7 @@ use crate::analysis::output_test_diffs;
 use crate::cpu_usage::load_cpu_usage;
 use crate::datadog::upload_datadog_metric;
 use crate::jobs::RunType;
-use crate::metrics::{download_auto_job_metrics, load_metrics};
+use crate::metrics::{JobMetrics, download_auto_job_metrics, download_job_metrics, load_metrics};
 use crate::utils::load_env_var;
 use analysis::output_bootstrap_stats;
 
@@ -138,6 +138,27 @@ fn upload_ci_metrics(cpu_usage_csv: &Path) -> anyhow::Result<()> {
     Ok(())
 }
 
+fn postprocess_metrics(
+    metrics_path: PathBuf,
+    parent: Option<String>,
+    job_name: Option<String>,
+) -> anyhow::Result<()> {
+    let metrics = load_metrics(&metrics_path)?;
+    output_bootstrap_stats(&metrics);
+
+    let (Some(parent), Some(job_name)) = (parent, job_name) else {
+        return Ok(());
+    };
+
+    let parent_metrics =
+        download_job_metrics(&job_name, &parent).context("cannot download parent metrics")?;
+    let job_metrics =
+        HashMap::from([(job_name, JobMetrics { parent: Some(parent_metrics), current: metrics })]);
+    output_test_diffs(job_metrics);
+
+    Ok(())
+}
+
 #[derive(clap::Parser)]
 enum Args {
     /// Calculate a list of jobs that should be executed on CI.
@@ -155,10 +176,19 @@ enum Args {
         #[clap(long = "type", default_value = "auto")]
         job_type: JobType,
     },
-    /// Postprocess the metrics.json file generated by bootstrap.
+    /// Postprocess the metrics.json file generated by bootstrap and output
+    /// various statistics.
+    /// If `--parent` and `--job-name` are provided, also display a diff
+    /// against previous metrics that are downloaded from CI.
     PostprocessMetrics {
         /// Path to the metrics.json file
        metrics_path: PathBuf,
+        /// A parent SHA against which to compare.
+        #[clap(long, requires("job_name"))]
+        parent: Option<String>,
+        /// The name of the current job.
+        #[clap(long, requires("parent"))]
+        job_name: Option<String>,
     },
     /// Upload CI metrics to Datadog.
     UploadBuildMetrics {
@@ -209,9 +239,8 @@ fn main() -> anyhow::Result<()> {
         Args::UploadBuildMetrics { cpu_usage_csv } => {
             upload_ci_metrics(&cpu_usage_csv)?;
         }
-        Args::PostprocessMetrics { metrics_path } => {
-            let metrics = load_metrics(&metrics_path)?;
-            output_bootstrap_stats(&metrics);
+        Args::PostprocessMetrics { metrics_path, parent, job_name } => {
+            postprocess_metrics(metrics_path, parent, job_name)?;
         }
         Args::PostMergeReport { current, parent } => {
             let db = load_db(default_jobs_file)?;
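
Usage sketch (not part of the patch): the extended subcommand can be exercised locally the same way the CI step above invokes it. The job name and metrics path below are illustrative placeholders, and citool is assumed to already be built at ./build/citool/debug/citool as in the workflow:

    # Resolve the closest bors merge commit to diff against (same command as in ci.yml)
    PARENT_COMMIT=`git rev-list --author='bors ' -n1 --first-parent HEAD^1`

    # Print bootstrap statistics plus a test diff against the parent job's metrics
    ./build/citool/debug/citool postprocess-metrics \
      --job-name dist-x86_64-linux \
      --parent ${PARENT_COMMIT} \
      build/metrics.json

Without --parent and --job-name the subcommand keeps its previous behaviour and only prints the bootstrap statistics, since postprocess_metrics returns early when either option is missing.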