Print test diffs into GitHub summary
So that we can also observe them for try builds before merging a PR.
parent 6c24c9c088
commit 30d57576b9

2 changed files with 40 additions and 6 deletions
.github/workflows/ci.yml (vendored): 5 changes
@@ -252,7 +252,12 @@ jobs:
             exit 0
           fi

+          # Get closest bors merge commit
+          PARENT_COMMIT=`git rev-list --author='bors <bors@rust-lang.org>' -n1 --first-parent HEAD^1`
+
           ./build/citool/debug/citool postprocess-metrics \
+            --job-name ${CI_JOB_NAME} \
+            --parent ${PARENT_COMMIT} \
             ${METRICS} >> ${GITHUB_STEP_SUMMARY}

       - name: upload job metrics to DataDog
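For context on the step above: the workflow finds the closest bors merge commit in shell and hands it to citool. Below is a minimal Rust sketch of the same lookup; it is not part of this commit, the closest_bors_parent helper is hypothetical, and it assumes git is available on PATH.

use std::process::Command;

/// Hypothetical helper (not in citool): ask git for the closest bors merge
/// commit on the first-parent history of HEAD^1, mirroring the shell step.
fn closest_bors_parent() -> Option<String> {
    let output = Command::new("git")
        .args([
            "rev-list",
            "--author=bors <bors@rust-lang.org>",
            "-n1",
            "--first-parent",
            "HEAD^1",
        ])
        .output()
        .ok()?;
    if !output.status.success() {
        return None;
    }
    // rev-list prints the SHA followed by a newline; trim it and treat an
    // empty result (no matching commit) as "not found".
    let sha = String::from_utf8(output.stdout).ok()?.trim().to_string();
    if sha.is_empty() { None } else { Some(sha) }
}

fn main() {
    match closest_bors_parent() {
        Some(sha) => println!("closest bors parent: {sha}"),
        None => println!("no bors parent found"),
    }
}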
Second changed file (citool source, path not shown):
@@ -5,7 +5,7 @@ mod jobs;
 mod metrics;
 mod utils;

-use std::collections::BTreeMap;
+use std::collections::{BTreeMap, HashMap};
 use std::path::{Path, PathBuf};
 use std::process::Command;

@@ -18,7 +18,7 @@ use crate::analysis::output_test_diffs;
 use crate::cpu_usage::load_cpu_usage;
 use crate::datadog::upload_datadog_metric;
 use crate::jobs::RunType;
-use crate::metrics::{download_auto_job_metrics, load_metrics};
+use crate::metrics::{JobMetrics, download_auto_job_metrics, download_job_metrics, load_metrics};
 use crate::utils::load_env_var;
 use analysis::output_bootstrap_stats;

@@ -138,6 +138,27 @@ fn upload_ci_metrics(cpu_usage_csv: &Path) -> anyhow::Result<()> {
     Ok(())
 }

+fn postprocess_metrics(
+    metrics_path: PathBuf,
+    parent: Option<String>,
+    job_name: Option<String>,
+) -> anyhow::Result<()> {
+    let metrics = load_metrics(&metrics_path)?;
+    output_bootstrap_stats(&metrics);
+
+    let (Some(parent), Some(job_name)) = (parent, job_name) else {
+        return Ok(());
+    };
+
+    let parent_metrics =
+        download_job_metrics(&job_name, &parent).context("cannot download parent metrics")?;
+    let job_metrics =
+        HashMap::from([(job_name, JobMetrics { parent: Some(parent_metrics), current: metrics })]);
+    output_test_diffs(job_metrics);
+
+    Ok(())
+}
+
 #[derive(clap::Parser)]
 enum Args {
     /// Calculate a list of jobs that should be executed on CI.
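The new postprocess_metrics helper above relies on Rust's let-else to return early when either optional argument is missing. Here is a standalone sketch of that pattern with hypothetical example values, unrelated to citool:

fn maybe_diff(parent: Option<String>, job_name: Option<String>) {
    // Destructure both Options at once; if either side is None, the refutable
    // pattern fails and the else block runs instead.
    let (Some(parent), Some(job_name)) = (parent, job_name) else {
        println!("parent or job name missing, skipping the diff");
        return;
    };
    println!("would diff job `{job_name}` against parent `{parent}`");
}

fn main() {
    maybe_diff(None, None); // prints the "skipping" message
    maybe_diff(Some("6c24c9c088".into()), Some("some-ci-job".into())); // prints the diff line
}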
@@ -155,10 +176,19 @@ enum Args {
         #[clap(long = "type", default_value = "auto")]
         job_type: JobType,
     },
-    /// Postprocess the metrics.json file generated by bootstrap.
+    /// Postprocess the metrics.json file generated by bootstrap and output
+    /// various statistics.
+    /// If `--parent` and `--job-name` are provided, also display a diff
+    /// against previous metrics that are downloaded from CI.
     PostprocessMetrics {
         /// Path to the metrics.json file
         metrics_path: PathBuf,
+        /// A parent SHA against which to compare.
+        #[clap(long, requires("job_name"))]
+        parent: Option<String>,
+        /// The name of the current job.
+        #[clap(long, requires("parent"))]
+        job_name: Option<String>,
     },
     /// Upload CI metrics to Datadog.
     UploadBuildMetrics {
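The requires attributes added above make clap reject --parent without --job-name and the other way around. A minimal sketch of that behaviour with a hypothetical Demo parser (not the actual citool CLI), assuming clap with the derive feature enabled:

use clap::Parser;

/// Hypothetical demo parser illustrating paired optional flags.
#[derive(Parser)]
struct Demo {
    /// A parent SHA against which to compare.
    #[clap(long, requires("job_name"))]
    parent: Option<String>,
    /// The name of the current job.
    #[clap(long, requires("parent"))]
    job_name: Option<String>,
}

fn main() {
    // `demo --parent abc` alone is a parse error; running with neither flag
    // or with both flags parses fine.
    let args = Demo::parse();
    println!("parent={:?}, job_name={:?}", args.parent, args.job_name);
}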
@@ -209,9 +239,8 @@ fn main() -> anyhow::Result<()> {
         Args::UploadBuildMetrics { cpu_usage_csv } => {
             upload_ci_metrics(&cpu_usage_csv)?;
         }
-        Args::PostprocessMetrics { metrics_path } => {
-            let metrics = load_metrics(&metrics_path)?;
-            output_bootstrap_stats(&metrics);
+        Args::PostprocessMetrics { metrics_path, parent, job_name } => {
+            postprocess_metrics(metrics_path, parent, job_name)?;
         }
         Args::PostMergeReport { current, parent } => {
             let db = load_db(default_jobs_file)?;