Downstream: Skip tasks requiring specific Bazel version (#1065)
Some tasks hardcode their Bazel version due to compatibility issues.
However, the downstream pipeline overwrites these values, thus introducing unwanted pipeline failures.
After this commit, such tasks will no longer run in a downstream pipeline; instead, their name and requested Bazel version will be logged in an annotation.
Fixes bazelbuild/bazel#10698
diff --git a/buildkite/bazelci.py b/buildkite/bazelci.py
index 560d5fd..73fc499 100755
--- a/buildkite/bazelci.py
+++ b/buildkite/bazelci.py
@@ -535,11 +535,7 @@
DEFAULT_XCODE_VERSION = "11.7"
XCODE_VERSION_REGEX = re.compile(r"^\d+\.\d+(\.\d+)?$")
-XCODE_VERSION_OVERRIDES = {
- "10.2.1": "10.3",
- "11.2": "11.2.1",
- "11.3": "11.3.1",
-}
+XCODE_VERSION_OVERRIDES = {"10.2.1": "10.3", "11.2": "11.2.1", "11.3": "11.3.1"}
ENCRYPTED_SAUCELABS_TOKEN = """
CiQAry63sOlZtTNtuOT5DAOLkum0rGof+DOweppZY1aOWbat8zwSTQAL7Hu+rgHSOr6P4S1cu4YG
@@ -567,6 +563,7 @@
INDEX_UPLOAD_POLICY_NEVER = "Never"
+
class BuildkiteException(Exception):
"""
Raised whenever something goes wrong and we should exit with an error.
@@ -602,9 +599,7 @@
"https://api.buildkite.com/v2/organizations/{}/pipelines/{}/builds/{}"
)
- _NEW_BUILD_URL_TEMPLATE = (
- "https://api.buildkite.com/v2/organizations/{}/pipelines/{}/builds"
- )
+ _NEW_BUILD_URL_TEMPLATE = "https://api.buildkite.com/v2/organizations/{}/pipelines/{}/builds"
_RETRY_JOB_URL_TEMPLATE = (
"https://api.buildkite.com/v2/organizations/{}/pipelines/{}/builds/{}/jobs/{}/retry"
@@ -615,7 +610,6 @@
self._pipeline = pipeline
self._token = self._get_buildkite_token()
-
def _get_buildkite_token(self):
return decrypt_token(
encrypted_token=self._ENCRYPTED_BUILDKITE_API_TESTING_TOKEN
@@ -626,8 +620,7 @@
else "buildkite-untrusted-api-token",
)
-
- def _open_url(self, url, params = []):
+ def _open_url(self, url, params=[]):
try:
params_str = "".join("&{}={}".format(k, v) for k, v in params)
return (
@@ -638,7 +631,6 @@
except urllib.error.HTTPError as ex:
raise BuildkiteException("Failed to open {}: {} - {}".format(url, ex.code, ex.reason))
-
def get_build_info(self, build_number):
"""Get build info for a pipeline with a given build number
See https://buildkite.com/docs/apis/rest-api/builds#get-a-build
@@ -656,7 +648,6 @@
output = self._open_url(url)
return json.loads(output)
-
def get_build_info_list(self, params):
"""Get a list of build infos for this pipeline
See https://buildkite.com/docs/apis/rest-api/builds#list-builds-for-a-pipeline
@@ -674,11 +665,9 @@
output = self._open_url(url, params)
return json.loads(output)
-
def get_build_log(self, job):
return self._open_url(job["raw_log_url"])
-
@staticmethod
def _check_response(response, expected_status_code):
if response.status_code != expected_status_code:
@@ -686,8 +675,7 @@
eprint("Response:\n", response.text)
response.raise_for_status()
-
- def trigger_new_build(self, commit, message = None, env = {}):
+ def trigger_new_build(self, commit, message=None, env={}):
"""Trigger a new build at a given commit and return the build metadata.
See https://buildkite.com/docs/apis/rest-api/builds#create-a-build
@@ -709,11 +697,10 @@
"message": message if message else f"Trigger build at {commit}",
"env": env,
}
- response = requests.post(url + "?access_token=" + self._token, json = data)
+ response = requests.post(url + "?access_token=" + self._token, json=data)
BuildkiteClient._check_response(response, requests.codes.created)
return json.loads(response.text)
-
def trigger_job_retry(self, build_number, job_id):
"""Trigger a job retry and return the job metadata.
See https://buildkite.com/docs/apis/rest-api/jobs#retry-a-job
@@ -733,7 +720,6 @@
BuildkiteClient._check_response(response, requests.codes.ok)
return json.loads(response.text)
-
def wait_job_to_finish(self, build_number, job_id, interval_time=30, logger=None):
"""Wait a job to finish and return the job metadata
@@ -759,7 +745,9 @@
return job
break
else:
- raise BuildkiteException(f"job id {job_id} doesn't exist in build " + build_info["web_url"])
+ raise BuildkiteException(
+ f"job id {job_id} doesn't exist in build " + build_info["web_url"]
+ )
url = build_info["web_url"]
if logger:
logger.log(f"Waiting for {url}, waited {t} seconds...")
@@ -767,7 +755,6 @@
t += interval_time
build_info = self.get_build_info(build_number)
-
def wait_build_to_finish(self, build_number, interval_time=30, logger=None):
"""Wait a build to finish and return the build metadata
@@ -1103,7 +1090,9 @@
)
if build_targets:
- build_flags, json_profile_out_build = calculate_flags(task_config, "build_flags", "build", tmpdir, test_env_vars)
+ build_flags, json_profile_out_build = calculate_flags(
+ task_config, "build_flags", "build", tmpdir, test_env_vars
+ )
try:
execute_bazel_build(
bazel_version,
@@ -1121,7 +1110,9 @@
upload_json_profile(json_profile_out_build, tmpdir)
if test_targets:
- test_flags, json_profile_out_test = calculate_flags(task_config, "test_flags", "test", tmpdir, test_env_vars)
+ test_flags, json_profile_out_test = calculate_flags(
+ task_config, "test_flags", "test", tmpdir, test_env_vars
+ )
if not is_windows():
# On platforms that support sandboxing (Linux, MacOS) we have
# to allow access to Bazelisk's cache directory.
@@ -1160,12 +1151,16 @@
upload_thread.join()
if index_targets:
- index_flags, json_profile_out_index = calculate_flags(task_config, "index_flags", "index", tmpdir, test_env_vars)
+ index_flags, json_profile_out_index = calculate_flags(
+ task_config, "index_flags", "index", tmpdir, test_env_vars
+ )
index_upload_policy = task_config.get("index_upload_policy", "IfBuildSuccess")
index_upload_gcs = task_config.get("index_upload_gcs", False)
try:
- should_upload_kzip = True if index_upload_policy == INDEX_UPLOAD_POLICY_ALWAYS else False
+ should_upload_kzip = (
+ True if index_upload_policy == INDEX_UPLOAD_POLICY_ALWAYS else False
+ )
try:
execute_bazel_build_with_kythe(
bazel_version,
@@ -1174,7 +1169,7 @@
index_flags,
index_targets,
None,
- incompatible_flags
+ incompatible_flags,
)
if index_upload_policy == INDEX_UPLOAD_POLICY_IF_BUILD_SUCCESS:
@@ -1193,7 +1188,6 @@
if json_profile_out_index:
upload_json_profile(json_profile_out_index, tmpdir)
-
finally:
terminate_background_process(sc_process)
if tmpdir:
@@ -1334,14 +1328,7 @@
pipeline = os.getenv("BUILDKITE_PIPELINE_SLUG")
destination = KZIPS_BUCKET + pipeline + "/" + final_kzip_name
print("Uploading to GCS {}".format(destination))
- execute_command(
- [
- gsutil_command(),
- "cp",
- final_kzip_name,
- destination,
- ]
- )
+ execute_command([gsutil_command(), "cp", final_kzip_name, destination])
def download_binary(dest_dir, platform, binary_name):
@@ -1365,16 +1352,11 @@
return download_binary(dest_dir, platform, binary_name)
-def download_binary_at_commit(dest_dir, platform, bazel_git_commit, bazel_binary_url, bazel_binary_path):
+def download_binary_at_commit(
+ dest_dir, platform, bazel_git_commit, bazel_binary_url, bazel_binary_path
+):
try:
- execute_command(
- [
- gsutil_command(),
- "cp",
- bazel_binary_url,
- bazel_binary_path,
- ]
- )
+ execute_command([gsutil_command(), "cp", bazel_binary_url, bazel_binary_path])
except subprocess.CalledProcessError as e:
raise BuildkiteException(
"Failed to download Bazel binary at %s, error message:\n%s" % (bazel_git_commit, str(e))
@@ -1792,17 +1774,14 @@
test_targets = [] if build_only else task_config.get("test_targets", [])
index_targets = [] if (build_only or test_only) else task_config.get("index_targets", [])
- index_targets_query = None if (build_only or test_only) else task_config.get("index_targets_query", None)
+ index_targets_query = (
+ None if (build_only or test_only) else task_config.get("index_targets_query", None)
+ )
if index_targets_query:
output = execute_command_and_get_output(
[bazel_binary]
+ common_startup_flags(platform)
- + [
- "--nomaster_bazelrc",
- "--bazelrc=/dev/null",
- "query",
- index_targets_query,
- ],
+ + ["--nomaster_bazelrc", "--bazelrc=/dev/null", "query", index_targets_query],
print_output=False,
)
index_targets += output.strip().split("\n")
@@ -2209,12 +2188,26 @@
git_commit = get_last_green_commit(last_green_commit_url)
config_hashes = set()
+ skipped_due_to_bazel_version = []
for task, task_config in task_configs.items():
+ platform = get_platform_for_task(task, task_config)
+ task_name = task_config.get("name")
+
# We override the Bazel version in downstream pipelines. This means that two tasks that
# only differ in the value of their explicit "bazel" field will be identical in the
# downstream pipeline, thus leading to duplicate work.
# Consequently, we filter those duplicate tasks here.
if is_downstream_project:
+ # Skip tasks that require a specific Bazel version
+ bazel = task_config.get("bazel")
+ if bazel and bazel != "latest":
+ skipped_due_to_bazel_version.append(
+ "{}: '{}'".format(
+ create_label(platform, project_name, task_name=task_name), bazel
+ )
+ )
+ continue
+
h = hash_task_config(task, task_config)
if h in config_hashes:
continue
@@ -2227,9 +2220,9 @@
raise BuildkiteException("Task {} has invalid shard value '{}'".format(task, shards))
step = runner_step(
- platform=get_platform_for_task(task, task_config),
+ platform=platform,
task=task,
- task_name=task_config.get("name"),
+ task_name=task_name,
project_name=project_name,
http_config=http_config,
file_config=file_config,
@@ -2242,6 +2235,22 @@
)
pipeline_steps.append(step)
+ if skipped_due_to_bazel_version:
+ lines = ["The following tasks were skipped since they require specific Bazel versions:", ""]
+ lines += ["- {}".format(s) for s in skipped_due_to_bazel_version]
+ commands = [
+ "buildkite-agent annotate --style=info '{}' --context 'ctx-skipped_due_to_bazel_version'".format(
+ "\n".join(lines)
+ )
+ ]
+ pipeline_steps.append(
+ create_step(
+ label=":pipeline: Print information about skipped tasks due to different Bazel versions",
+ commands=commands,
+ platform=DEFAULT_PLATFORM,
+ )
+ )
+
pipeline_slug = os.getenv("BUILDKITE_PIPELINE_SLUG")
all_downstream_pipeline_slugs = []
for _, config in DOWNSTREAM_PROJECTS.items():
@@ -2254,7 +2263,8 @@
# - uses a custom built Bazel binary (in Bazel Downstream Projects pipeline)
# - testing incompatible flags
# - running `bazelisk --migrate` in a non-downstream pipeline
- if (os.getenv("BUILDKITE_BRANCH") in ("master", "stable", "main")
+ if (
+ os.getenv("BUILDKITE_BRANCH") in ("master", "stable", "main")
and pipeline_slug in all_downstream_pipeline_slugs
and not (is_pull_request() or use_but or incompatible_flags or use_bazelisk_migrate())
):
@@ -2293,8 +2303,9 @@
host = re.search(r"https://(.+?)\.googlesource", git_repository).group(1)
if not host:
raise BuildkiteException("Couldn't get host name from %s" % git_repository)
- text = "The transformed code used in this pipeline can be found under https://{}-review.googlesource.com/q/{}". \
- format(host, os.getenv("BUILDKITE_COMMIT"))
+ text = "The transformed code used in this pipeline can be found under https://{}-review.googlesource.com/q/{}".format(
+ host, os.getenv("BUILDKITE_COMMIT")
+ )
commands = ["buildkite-agent annotate --style=info '{}'".format(text)]
pipeline_steps.append(
create_step(
@@ -2765,7 +2776,9 @@
if test_incompatible_flags:
incompatible_flags_map = fetch_incompatible_flags()
if not incompatible_flags_map:
- raise BuildkiteException("No incompatible flag issue is found on github for current version of Bazel.")
+ raise BuildkiteException(
+ "No incompatible flag issue is found on github for current version of Bazel."
+ )
info_box_step = print_incompatible_flags_info_box_step(incompatible_flags_map)
if info_box_step is not None:
pipeline_steps.append(info_box_step)
@@ -3070,7 +3083,9 @@
gsutil_command(),
"cp",
bazel_nojdk_binary_path,
- bazelci_builds_nojdk_gs_url(target_platform_name, os.environ["BUILDKITE_COMMIT"]),
+ bazelci_builds_nojdk_gs_url(
+ target_platform_name, os.environ["BUILDKITE_COMMIT"]
+ ),
]
)
bazel_nojdk_hashes[target_platform_name] = sha256_hexdigest(bazel_nojdk_binary_path)
@@ -3160,7 +3175,9 @@
break
try:
- try_publish_binaries(bazel_hashes, bazel_nojdk_hashes, current_build_number, latest_generation)
+ try_publish_binaries(
+ bazel_hashes, bazel_nojdk_hashes, current_build_number, latest_generation
+ )
except BinaryUploadRaceException:
# Retry.
continue