1
Fork 0

Rollup merge of #138307 - Kobzol:citool-alias, r=marcoieni

Allow specifying glob patterns for try jobs

This PR modifies the `try-job` lookup logic to allow glob patterns. So you can e.g. request all MSVC-related jobs with `try-job: *msvc*`.

Best reviewed commit by commit.

r? `@marcoieni`

try-job: `*msvc*`
This commit is contained in:
Jakub Beránek 2025-03-11 13:30:54 +01:00 committed by GitHub
commit 3e67637c13
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
7 changed files with 361 additions and 242 deletions

View file

@ -107,6 +107,7 @@ dependencies = [
"build_helper",
"clap",
"csv",
"glob-match",
"insta",
"serde",
"serde_json",
@ -308,6 +309,12 @@ dependencies = [
"wasi",
]
[[package]]
name = "glob-match"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9985c9503b412198aa4197559e9a318524ebc4519c229bfa05a535828c950b9d"
[[package]]
name = "hashbrown"
version = "0.15.2"

View file

@ -7,6 +7,7 @@ edition = "2021"
anyhow = "1"
clap = { version = "4.5", features = ["derive"] }
csv = "1"
glob-match = "0.2"
serde = { version = "1", features = ["derive"] }
serde_yaml = "0.9"
serde_json = "1"

244
src/ci/citool/src/jobs.rs Normal file
View file

@ -0,0 +1,244 @@
#[cfg(test)]
mod tests;
use std::collections::BTreeMap;
use serde_yaml::Value;
use crate::GitHubContext;
/// Representation of a job loaded from the `src/ci/github-actions/jobs.yml` file.
#[derive(serde::Deserialize, Debug, Clone)]
pub struct Job {
    /// Name of the job, e.g. mingw-check
    pub name: String,
    /// GitHub runner on which the job should be executed
    pub os: String,
    pub env: BTreeMap<String, Value>,
    /// Should the job be only executed on a specific channel?
    #[serde(default)]
    pub only_on_channel: Option<String>,
    /// Do not cancel the whole workflow if this job fails.
    #[serde(default)]
    pub continue_on_error: Option<bool>,
    /// Free additional disk space in the job, by removing unused packages.
    #[serde(default)]
    pub free_disk: Option<bool>,
}

impl Job {
    /// By default, the Docker image of a job is based on its name.
    /// However, it can be overridden by its IMAGE environment variable.
    pub fn image(&self) -> String {
        match self.env.get("IMAGE") {
            Some(value) => value.as_str().expect("IMAGE value should be a string").to_string(),
            None => self.name.clone(),
        }
    }

    /// Does this job run on an Ubuntu (Linux) runner?
    fn is_linux(&self) -> bool {
        self.os.contains("ubuntu")
    }
}
/// Shared environment variables for each run type, loaded from the
/// `envs` section of `jobs.yml`.
#[derive(serde::Deserialize, Debug)]
struct JobEnvironments {
/// Env vars shared by all PR jobs.
#[serde(rename = "pr")]
pr_env: BTreeMap<String, Value>,
/// Env vars shared by all try jobs.
#[serde(rename = "try")]
try_env: BTreeMap<String, Value>,
/// Env vars shared by all auto (merge) jobs.
#[serde(rename = "auto")]
auto_env: BTreeMap<String, Value>,
}
/// Database of all CI jobs, deserialized from `src/ci/github-actions/jobs.yml`.
#[derive(serde::Deserialize, Debug)]
pub struct JobDatabase {
/// Jobs executed on every push to a PR branch.
#[serde(rename = "pr")]
pub pr_jobs: Vec<Job>,
/// Jobs executed by default for `@bors try` builds.
#[serde(rename = "try")]
pub try_jobs: Vec<Job>,
/// Jobs executed for merge (auto) builds; also the set that
/// `try-job` patterns are matched against.
#[serde(rename = "auto")]
pub auto_jobs: Vec<Job>,
/// Shared environments for the individual run types.
envs: JobEnvironments,
}
impl JobDatabase {
    /// Find `auto` jobs that correspond to the passed `pattern`.
    /// Patterns are matched using the glob syntax.
    /// For example `dist-*` matches all jobs starting with `dist-`.
    fn find_auto_jobs_by_pattern(&self, pattern: &str) -> Vec<Job> {
        let mut matched = Vec::new();
        for job in &self.auto_jobs {
            if glob_match::glob_match(pattern, &job.name) {
                matched.push(job.clone());
            }
        }
        matched
    }
}
/// Parse the job database from the YAML text `db`.
///
/// Expands YAML merge keys (`<<`) before deserializing, because
/// `serde_yaml` does not resolve them on its own.
///
/// Returns an error if the YAML is malformed or does not match the
/// [`JobDatabase`] schema.
pub fn load_job_db(db: &str) -> anyhow::Result<JobDatabase> {
    // `db` is already a `&str`; re-borrowing with `&db` (a `&&str`) is a
    // needless borrow that clippy flags.
    let mut db: Value = serde_yaml::from_str(db)?;

    // We need to expand merge keys (<<), because serde_yaml can't deal with them
    // `apply_merge` only applies the merge once, so do it a few times to unwrap nested merges.
    db.apply_merge()?;
    db.apply_merge()?;

    let db: JobDatabase = serde_yaml::from_value(db)?;
    Ok(db)
}
/// Representation of a job outputted to a GitHub Actions workflow.
#[derive(serde::Serialize, Debug)]
struct GithubActionsJob {
/// The main identifier of the job, used by CI scripts to determine what should be executed.
name: String,
/// Helper label displayed in GitHub Actions interface, containing the job name and a run type
/// prefix (PR/try/auto).
full_name: String,
/// GitHub runner on which the job should be executed.
os: String,
/// Fully resolved env vars: shared per-run-type env merged with job-specific env.
env: BTreeMap<String, serde_json::Value>,
/// Do not cancel the whole workflow if this job fails (omitted from output when unset).
#[serde(skip_serializing_if = "Option::is_none")]
continue_on_error: Option<bool>,
/// Free additional disk space in the job (omitted from output when unset).
#[serde(skip_serializing_if = "Option::is_none")]
free_disk: Option<bool>,
}
/// Skip CI jobs that are not supposed to be executed on the given `channel`.
fn skip_jobs(mut jobs: Vec<Job>, channel: &str) -> Vec<Job> {
    // Keep jobs that either have no channel restriction or whose
    // restriction matches the current channel.
    jobs.retain(|job| match job.only_on_channel.as_deref() {
        None => true,
        Some(required_channel) => required_channel == channel,
    });
    jobs
}
/// Type of workflow that is being executed on CI
#[derive(Debug)]
pub enum RunType {
/// Workflows that run after a push to a PR branch
PullRequest,
/// Try run started with @bors try
/// `job_patterns` holds the glob patterns parsed from `try-job:` lines
/// in the commit message; `None` means run the default try jobs.
TryJob { job_patterns: Option<Vec<String>> },
/// Merge attempt workflow
AutoJob,
}
/// Maximum number of custom try jobs that can be requested in a single
/// `@bors try` request.
const MAX_TRY_JOBS_COUNT: usize = 20;

/// Resolve the concrete list of jobs to run for the given `run_type` on the
/// given `channel`, merging the shared per-run-type env into each job's env.
fn calculate_jobs(
    run_type: &RunType,
    db: &JobDatabase,
    channel: &str,
) -> anyhow::Result<Vec<GithubActionsJob>> {
    let (jobs, prefix, base_env) = match run_type {
        RunType::PullRequest => (db.pr_jobs.clone(), "PR", &db.envs.pr_env),
        RunType::TryJob { job_patterns } => {
            let jobs = match job_patterns {
                Some(patterns) => {
                    // Expand every pattern into the auto jobs it matches,
                    // deduplicating by job name and collecting patterns that
                    // matched nothing so they can be reported together.
                    let mut jobs: Vec<Job> = vec![];
                    let mut unknown_patterns = vec![];
                    for pattern in patterns {
                        let matched_jobs = db.find_auto_jobs_by_pattern(pattern);
                        if matched_jobs.is_empty() {
                            unknown_patterns.push(pattern.clone());
                            continue;
                        }
                        for job in matched_jobs {
                            let already_selected = jobs.iter().any(|j| j.name == job.name);
                            if !already_selected {
                                jobs.push(job);
                            }
                        }
                    }
                    if !unknown_patterns.is_empty() {
                        return Err(anyhow::anyhow!(
                            "Patterns `{}` did not match any auto jobs",
                            unknown_patterns.join(", ")
                        ));
                    }
                    // The cap is enforced on the *expanded* job list, since a
                    // single glob can match many jobs.
                    if jobs.len() > MAX_TRY_JOBS_COUNT {
                        return Err(anyhow::anyhow!(
                            "It is only possible to schedule up to {MAX_TRY_JOBS_COUNT} custom jobs, received {} custom jobs expanded from {} pattern(s)",
                            jobs.len(),
                            patterns.len()
                        ));
                    }
                    jobs
                }
                // No explicit patterns: fall back to the default try jobs.
                None => db.try_jobs.clone(),
            };
            (jobs, "try", &db.envs.try_env)
        }
        RunType::AutoJob => (db.auto_jobs.clone(), "auto", &db.envs.auto_env),
    };

    let jobs = skip_jobs(jobs, channel)
        .into_iter()
        .map(|job| {
            // Job-specific env vars override the shared per-run-type env.
            let mut env: BTreeMap<String, serde_json::Value> = crate::yaml_map_to_json(base_env);
            env.extend(crate::yaml_map_to_json(&job.env));
            let full_name = format!("{prefix} - {}", job.name);
            GithubActionsJob {
                name: job.name,
                full_name,
                os: job.os,
                env,
                continue_on_error: job.continue_on_error,
                free_disk: job.free_disk,
            }
        })
        .collect();
    Ok(jobs)
}
/// Compute the job matrix for the current GitHub context and print it to
/// stdout in the `key=value` format consumed by the workflow, with debug
/// copies on stderr.
pub fn calculate_job_matrix(
    db: JobDatabase,
    gh_ctx: GitHubContext,
    channel: &str,
) -> anyhow::Result<()> {
    let Some(run_type) = gh_ctx.get_run_type() else {
        return Err(anyhow::anyhow!(
            "Cannot determine the type of workflow that is being executed"
        ));
    };
    eprintln!("Run type: {run_type:?}");

    let jobs = calculate_jobs(&run_type, &db, channel)?;
    if jobs.is_empty() {
        return Err(anyhow::anyhow!("Computed job list is empty"));
    }

    // Shadow the run type with its short string form for the output.
    let run_type = match run_type {
        RunType::PullRequest => "pr",
        RunType::TryJob { .. } => "try",
        RunType::AutoJob => "auto",
    };

    eprintln!("Output");
    eprintln!("jobs={jobs:?}");
    eprintln!("run_type={run_type}");
    println!("jobs={}", serde_json::to_string(&jobs)?);
    println!("run_type={run_type}");
    Ok(())
}
/// Look up the job called `name` and check that it runs on Linux (only Linux
/// jobs can be executed locally). On a failed lookup, the error lists the
/// available Linux jobs, sorted by name.
pub fn find_linux_job<'a>(jobs: &'a [Job], name: &str) -> anyhow::Result<&'a Job> {
    match jobs.iter().find(|j| j.name == name) {
        Some(job) if job.is_linux() => Ok(job),
        Some(_) => Err(anyhow::anyhow!("Only Linux jobs can be executed locally")),
        None => {
            let mut available_jobs: Vec<String> =
                jobs.iter().filter(|j| j.is_linux()).map(|j| j.name.to_string()).collect();
            available_jobs.sort();
            Err(anyhow::anyhow!(
                "Job {name} not found. The following jobs are available:\n{}",
                available_jobs.join(", ")
            ))
        }
    }
}

View file

@ -0,0 +1,64 @@
use crate::jobs::{JobDatabase, load_job_db};
// Verify glob expansion of try-job patterns against a small in-memory job
// database: prefix (`dist-*`), suffix (`*-alt`), infix (`dist*-alt`) and
// substring (`*i686*`) patterns. Expected results preserve the declaration
// order of jobs in the YAML below.
#[test]
fn lookup_job_pattern() {
let db = load_job_db(
r#"
envs:
pr:
try:
auto:
pr:
try:
auto:
- name: dist-a
os: ubuntu
env: {}
- name: dist-a-alt
os: ubuntu
env: {}
- name: dist-b
os: ubuntu
env: {}
- name: dist-b-alt
os: ubuntu
env: {}
- name: test-a
os: ubuntu
env: {}
- name: test-a-alt
os: ubuntu
env: {}
- name: test-i686
os: ubuntu
env: {}
- name: dist-i686
os: ubuntu
env: {}
- name: test-msvc-i686-1
os: ubuntu
env: {}
- name: test-msvc-i686-2
os: ubuntu
env: {}
"#,
)
.unwrap();
check_pattern(&db, "dist-*", &["dist-a", "dist-a-alt", "dist-b", "dist-b-alt", "dist-i686"]);
check_pattern(&db, "*-alt", &["dist-a-alt", "dist-b-alt", "test-a-alt"]);
check_pattern(&db, "dist*-alt", &["dist-a-alt", "dist-b-alt"]);
check_pattern(
&db,
"*i686*",
&["test-i686", "dist-i686", "test-msvc-i686-1", "test-msvc-i686-2"],
);
}
/// Assert that expanding `pattern` against the auto jobs of `db` yields
/// exactly the job names in `expected`, in order.
#[track_caller]
fn check_pattern(db: &JobDatabase, pattern: &str, expected: &[&str]) {
    let matched: Vec<String> =
        db.find_auto_jobs_by_pattern(pattern).into_iter().map(|job| job.name).collect();
    assert_eq!(matched, expected);
}

View file

@ -1,5 +1,6 @@
mod cpu_usage;
mod datadog;
mod jobs;
mod merge_report;
mod metrics;
mod utils;
@ -10,10 +11,12 @@ use std::process::Command;
use anyhow::Context;
use clap::Parser;
use jobs::JobDatabase;
use serde_yaml::Value;
use crate::cpu_usage::load_cpu_usage;
use crate::datadog::upload_datadog_metric;
use crate::jobs::RunType;
use crate::merge_report::post_merge_report;
use crate::metrics::postprocess_metrics;
use crate::utils::load_env_var;
@ -22,104 +25,6 @@ const CI_DIRECTORY: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/..");
const DOCKER_DIRECTORY: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/../docker");
const JOBS_YML_PATH: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/../github-actions/jobs.yml");
// NOTE(review): these are the pre-refactor definitions that this commit
// removes from main.rs; they were moved (with changes) to
// src/ci/citool/src/jobs.rs. Kept here verbatim as the deleted side of the diff.
/// Representation of a job loaded from the `src/ci/github-actions/jobs.yml` file.
#[derive(serde::Deserialize, Debug, Clone)]
struct Job {
/// Name of the job, e.g. mingw-check
name: String,
/// GitHub runner on which the job should be executed
os: String,
env: BTreeMap<String, Value>,
/// Should the job be only executed on a specific channel?
#[serde(default)]
only_on_channel: Option<String>,
/// Rest of attributes that will be passed through to GitHub actions
#[serde(flatten)]
extra_keys: BTreeMap<String, Value>,
}
impl Job {
// True when the job runs on an Ubuntu (Linux) runner.
fn is_linux(&self) -> bool {
self.os.contains("ubuntu")
}
/// By default, the Docker image of a job is based on its name.
/// However, it can be overridden by its IMAGE environment variable.
fn image(&self) -> String {
self.env
.get("IMAGE")
.map(|v| v.as_str().expect("IMAGE value should be a string").to_string())
.unwrap_or_else(|| self.name.clone())
}
}
// Shared environment variables for each run type (pr/try/auto).
#[derive(serde::Deserialize, Debug)]
struct JobEnvironments {
#[serde(rename = "pr")]
pr_env: BTreeMap<String, Value>,
#[serde(rename = "try")]
try_env: BTreeMap<String, Value>,
#[serde(rename = "auto")]
auto_env: BTreeMap<String, Value>,
}
// Database of all CI jobs, deserialized from jobs.yml.
#[derive(serde::Deserialize, Debug)]
struct JobDatabase {
#[serde(rename = "pr")]
pr_jobs: Vec<Job>,
#[serde(rename = "try")]
try_jobs: Vec<Job>,
#[serde(rename = "auto")]
auto_jobs: Vec<Job>,
/// Shared environments for the individual run types.
envs: JobEnvironments,
}
impl JobDatabase {
// Exact-name lookup among auto jobs; superseded by glob matching in this commit.
fn find_auto_job_by_name(&self, name: &str) -> Option<Job> {
self.auto_jobs.iter().find(|j| j.name == name).cloned()
}
}
// Load and parse jobs.yml from `path`, expanding YAML merge keys first.
fn load_job_db(path: &Path) -> anyhow::Result<JobDatabase> {
let db = utils::read_to_string(path)?;
let mut db: Value = serde_yaml::from_str(&db)?;
// We need to expand merge keys (<<), because serde_yaml can't deal with them
// `apply_merge` only applies the merge once, so do it a few times to unwrap nested merges.
db.apply_merge()?;
db.apply_merge()?;
let db: JobDatabase = serde_yaml::from_value(db)?;
Ok(db)
}
/// Representation of a job outputted to a GitHub Actions workflow.
#[derive(serde::Serialize, Debug)]
struct GithubActionsJob {
/// The main identifier of the job, used by CI scripts to determine what should be executed.
name: String,
/// Helper label displayed in GitHub Actions interface, containing the job name and a run type
/// prefix (PR/try/auto).
full_name: String,
os: String,
env: BTreeMap<String, serde_json::Value>,
#[serde(flatten)]
extra_keys: BTreeMap<String, serde_json::Value>,
}
/// Type of workflow that is being executed on CI
#[derive(Debug)]
enum RunType {
/// Workflows that run after a push to a PR branch
PullRequest,
/// Try run started with @bors try
TryJob { custom_jobs: Option<Vec<String>> },
/// Merge attempt workflow
AutoJob,
}
struct GitHubContext {
event_name: String,
branch_ref: String,
@ -130,24 +35,31 @@ impl GitHubContext {
fn get_run_type(&self) -> Option<RunType> {
match (self.event_name.as_str(), self.branch_ref.as_str()) {
("pull_request", _) => Some(RunType::PullRequest),
("push", "refs/heads/try-perf") => Some(RunType::TryJob { custom_jobs: None }),
("push", "refs/heads/try-perf") => Some(RunType::TryJob { job_patterns: None }),
("push", "refs/heads/try" | "refs/heads/automation/bors/try") => {
let custom_jobs = self.get_custom_jobs();
let custom_jobs = if !custom_jobs.is_empty() { Some(custom_jobs) } else { None };
Some(RunType::TryJob { custom_jobs })
let patterns = self.get_try_job_patterns();
let patterns = if !patterns.is_empty() { Some(patterns) } else { None };
Some(RunType::TryJob { job_patterns: patterns })
}
("push", "refs/heads/auto") => Some(RunType::AutoJob),
_ => None,
}
}
/// Tries to parse names of specific CI jobs that should be executed in the form of
/// try-job: <job-name>
/// from the commit message of the passed GitHub context.
fn get_custom_jobs(&self) -> Vec<String> {
/// Tries to parse patterns of CI jobs that should be executed
/// from the commit message of the passed GitHub context
///
/// They can be specified in the form of
/// try-job: <job-pattern>
/// or
/// try-job: `<job-pattern>`
/// (to avoid GitHub rendering the glob patterns as Markdown)
fn get_try_job_patterns(&self) -> Vec<String> {
if let Some(ref msg) = self.commit_message {
msg.lines()
.filter_map(|line| line.trim().strip_prefix("try-job: "))
// Strip backticks if present
.map(|l| l.trim_matches('`'))
.map(|l| l.trim().to_string())
.collect()
} else {
@ -164,15 +76,6 @@ fn load_github_ctx() -> anyhow::Result<GitHubContext> {
Ok(GitHubContext { event_name, branch_ref: load_env_var("GITHUB_REF")?, commit_message })
}
// NOTE(review): deleted side of the diff — this helper moved unchanged to jobs.rs.
/// Skip CI jobs that are not supposed to be executed on the given `channel`.
fn skip_jobs(jobs: Vec<Job>, channel: &str) -> Vec<Job> {
jobs.into_iter()
.filter(|job| {
// Keep the job when it has no channel restriction or the restriction matches.
job.only_on_channel.is_none() || job.only_on_channel.as_deref() == Some(channel)
})
.collect()
}
fn yaml_map_to_json(map: &BTreeMap<String, Value>) -> BTreeMap<String, serde_json::Value> {
map.into_iter()
.map(|(key, value)| {
@ -184,124 +87,13 @@ fn yaml_map_to_json(map: &BTreeMap<String, Value>) -> BTreeMap<String, serde_jso
.collect()
}
// NOTE(review): deleted side of the diff — replaced by the glob-pattern-aware
// calculate_jobs in jobs.rs. This version only supported exact job names and
// capped the *requested* names (not the expanded set) at MAX_TRY_JOBS_COUNT.
/// Maximum number of custom try jobs that can be requested in a single
/// `@bors try` request.
const MAX_TRY_JOBS_COUNT: usize = 20;
fn calculate_jobs(
run_type: &RunType,
db: &JobDatabase,
channel: &str,
) -> anyhow::Result<Vec<GithubActionsJob>> {
let (jobs, prefix, base_env) = match run_type {
RunType::PullRequest => (db.pr_jobs.clone(), "PR", &db.envs.pr_env),
RunType::TryJob { custom_jobs } => {
let jobs = if let Some(custom_jobs) = custom_jobs {
if custom_jobs.len() > MAX_TRY_JOBS_COUNT {
return Err(anyhow::anyhow!(
"It is only possible to schedule up to {MAX_TRY_JOBS_COUNT} custom jobs, received {} custom jobs",
custom_jobs.len()
));
}
let mut jobs = vec![];
let mut unknown_jobs = vec![];
for custom_job in custom_jobs {
// Exact-name lookup; names that resolve to no auto job are reported together.
if let Some(job) = db.find_auto_job_by_name(custom_job) {
jobs.push(job);
} else {
unknown_jobs.push(custom_job.clone());
}
}
if !unknown_jobs.is_empty() {
return Err(anyhow::anyhow!(
"Custom job(s) `{}` not found in auto jobs",
unknown_jobs.join(", ")
));
}
jobs
} else {
db.try_jobs.clone()
};
(jobs, "try", &db.envs.try_env)
}
RunType::AutoJob => (db.auto_jobs.clone(), "auto", &db.envs.auto_env),
};
let jobs = skip_jobs(jobs, channel);
let jobs = jobs
.into_iter()
.map(|job| {
// Job-specific env overrides the shared per-run-type env.
let mut env: BTreeMap<String, serde_json::Value> = yaml_map_to_json(base_env);
env.extend(yaml_map_to_json(&job.env));
let full_name = format!("{prefix} - {}", job.name);
GithubActionsJob {
name: job.name,
full_name,
os: job.os,
env,
extra_keys: yaml_map_to_json(&job.extra_keys),
}
})
.collect();
Ok(jobs)
}
// NOTE(review): deleted side of the diff — moved (made pub) to jobs.rs.
// Computes the job matrix for the current GitHub context and prints it as
// `key=value` lines on stdout, with debug copies on stderr.
fn calculate_job_matrix(
db: JobDatabase,
gh_ctx: GitHubContext,
channel: &str,
) -> anyhow::Result<()> {
let run_type = gh_ctx.get_run_type().ok_or_else(|| {
anyhow::anyhow!("Cannot determine the type of workflow that is being executed")
})?;
eprintln!("Run type: {run_type:?}");
let jobs = calculate_jobs(&run_type, &db, channel)?;
if jobs.is_empty() {
return Err(anyhow::anyhow!("Computed job list is empty"));
}
// Shadow the run type with its short string form for the output.
let run_type = match run_type {
RunType::PullRequest => "pr",
RunType::TryJob { .. } => "try",
RunType::AutoJob => "auto",
};
eprintln!("Output");
eprintln!("jobs={jobs:?}");
eprintln!("run_type={run_type}");
println!("jobs={}", serde_json::to_string(&jobs)?);
println!("run_type={run_type}");
Ok(())
}
// NOTE(review): deleted side of the diff — moved (made pub) to jobs.rs.
// Looks up a job by exact name and verifies it runs on Linux; only Linux
// jobs can be executed locally. On lookup failure, lists available Linux jobs.
fn find_linux_job<'a>(jobs: &'a [Job], name: &str) -> anyhow::Result<&'a Job> {
let Some(job) = jobs.iter().find(|j| j.name == name) else {
let available_jobs: Vec<&Job> = jobs.iter().filter(|j| j.is_linux()).collect();
let mut available_jobs =
available_jobs.iter().map(|j| j.name.to_string()).collect::<Vec<_>>();
available_jobs.sort();
return Err(anyhow::anyhow!(
"Job {name} not found. The following jobs are available:\n{}",
available_jobs.join(", ")
));
};
if !job.is_linux() {
return Err(anyhow::anyhow!("Only Linux jobs can be executed locally"));
}
Ok(job)
}
fn run_workflow_locally(db: JobDatabase, job_type: JobType, name: String) -> anyhow::Result<()> {
let jobs = match job_type {
JobType::Auto => &db.auto_jobs,
JobType::PR => &db.pr_jobs,
};
let job = find_linux_job(jobs, &name).with_context(|| format!("Cannot find job {name}"))?;
let job =
jobs::find_linux_job(jobs, &name).with_context(|| format!("Cannot find job {name}"))?;
let mut custom_env: BTreeMap<String, String> = BTreeMap::new();
// Replicate src/ci/scripts/setup-environment.sh
@ -385,7 +177,7 @@ enum Args {
}
#[derive(clap::ValueEnum, Clone)]
enum JobType {
pub enum JobType {
/// Merge attempt ("auto") job
Auto,
/// Pull request job
@ -395,7 +187,10 @@ enum JobType {
fn main() -> anyhow::Result<()> {
let args = Args::parse();
let default_jobs_file = Path::new(JOBS_YML_PATH);
let load_db = |jobs_path| load_job_db(jobs_path).context("Cannot load jobs.yml");
let load_db = |jobs_path| {
let db = utils::read_to_string(jobs_path)?;
Ok::<_, anyhow::Error>(jobs::load_job_db(&db).context("Cannot load jobs.yml")?)
};
match args {
Args::CalculateJobMatrix { jobs_file } => {
@ -407,7 +202,7 @@ fn main() -> anyhow::Result<()> {
.trim()
.to_string();
calculate_job_matrix(load_db(jobs_path)?, gh_ctx, &channel)
jobs::calculate_job_matrix(load_db(jobs_path)?, gh_ctx, &channel)
.context("Failed to calculate job matrix")?;
}
Args::RunJobLocally { job_type, name } => {

View file

@ -4,7 +4,7 @@ use std::collections::HashMap;
use anyhow::Context;
use build_helper::metrics::{JsonRoot, TestOutcome};
use crate::JobDatabase;
use crate::jobs::JobDatabase;
use crate::metrics::get_test_suites;
type Sha = String;

View file

@ -133,29 +133,37 @@ There are several use-cases for try builds:
Again, a working compiler build is needed for this, which can be produced by
the [dist-x86_64-linux] CI job.
- Run a specific CI job (e.g. Windows tests) on a PR, to quickly test if it
passes the test suite executed by that job. You can select which CI jobs will
be executed in the try build by adding up to 10 lines containing `try-job:
<name of job>` to the PR description. All such specified jobs will be executed
in the try build once the `@bors try` command is used on the PR. If no try
jobs are specified in this way, the jobs defined in the `try` section of
[`jobs.yml`] will be executed by default.
passes the test suite executed by that job.
You can select which CI jobs will
be executed in the try build by adding lines containing `try-job:
<job pattern>` to the PR description. All such specified jobs will be executed
in the try build once the `@bors try` command is used on the PR. If no try
jobs are specified in this way, the jobs defined in the `try` section of
[`jobs.yml`] will be executed by default.
Each pattern can either be an exact name of a job or a glob pattern that matches multiple jobs,
for example `*msvc*` or `*-alt`. You can start at most 20 jobs in a single try build. When using
glob patterns, you might want to wrap them in backticks (`` ` ``) to avoid GitHub rendering
the pattern as Markdown.
> **Using `try-job` PR description directives**
>
> 1. Identify which set of try-jobs (max 10) you would like to exercise. You can
> 1. Identify which set of try-jobs you would like to exercise. You can
> find the name of the CI jobs in [`jobs.yml`].
>
> 2. Amend PR description to include (usually at the end of the PR description)
> e.g.
> 2. Amend PR description to include a set of patterns (usually at the end
> of the PR description), for example:
>
> ```text
> This PR fixes #123456.
>
> try-job: x86_64-msvc
> try-job: test-various
> try-job: `*-alt`
> ```
>
> Each `try-job` directive must be on its own line.
> Each `try-job` pattern must be on its own line.
>
> 3. Run the prescribed try jobs with `@bors try`. As aforementioned, this
> requires the user to either (1) have `try` permissions or (2) be delegated