diff --git a/infra/build/functions/base_images.py b/infra/build/functions/base_images.py
index 49185f3c4ff2..0df75b731a63 100644
--- a/infra/build/functions/base_images.py
+++ b/infra/build/functions/base_images.py
@@ -13,10 +13,19 @@
 # limitations under the License.
 #
 ################################################################################
-"""Cloud function to build base images on Google Cloud Builder."""
+"""Cloud function to build base images on Google Cloud Builder.
+
+This script can be run locally for testing or deployment purposes. By default,
+it performs a real build. To perform a dry run, use the '--dry-run' flag. To
+prevent images from being pushed to the registry, use '--no-push'.
+
+Example:
+  python3 infra/build/functions/base_images.py --dry-run
+"""
 from collections.abc import Sequence
 import logging
 import os
+import sys
 
 import google.auth
 
@@ -25,158 +34,320 @@
 BASE_PROJECT = 'oss-fuzz-base'
 IMAGE_NAME_PREFIX = f'gcr.io/{BASE_PROJECT}/'
 MAJOR_TAG = 'v1'
-MANIFEST_IMAGES = [
-    'gcr.io/oss-fuzz-base/base-builder', 'gcr.io/oss-fuzz-base/base-runner'
-]
 TIMEOUT = '21600'  # 6 hours
 
+# Defines the Ubuntu versions supported by the build infrastructure.
+# 'legacy' refers to the unversioned, default image.
+# Note: This list indicates build capability, not production readiness.
+# A version is only ready for general use after being fully enabled in
+# ClusterFuzz.
+SUPPORTED_VERSIONS = ('legacy', 'ubuntu-20-04', 'ubuntu-24-04')
+
+# Define which of the supported versions is considered the default.
+# This version will receive the ':v1' tag.
+DEFAULT_VERSION = 'legacy'
+
 
 class ImageConfig:
+  """Configuration for a specific base image version."""
   name: str
+  version: str
   path: str
+  dockerfile_path: str
   build_args: Sequence[str] | None
 
   def __init__(self,
                name: str,
+               version: str,
                path: str | None = None,
                build_args: Sequence[str] | None = None):
     self.name = name
-    if path:
-      self.path = path
-    else:
-      self.path = get_base_image_path(name)
-
+    self.version = version
+    self.path = path if path else self._get_default_path()
+    self.dockerfile_path = self._resolve_dockerfile()
     self.build_args = build_args
 
+  def _get_default_path(self) -> str:
+    """Returns the default path to the image's build directory."""
+    if self.name == 'indexer':
+      return os.path.join('infra', 'indexer')
+    return os.path.join('infra', 'base-images', self.name)
+
+  def _resolve_dockerfile(self) -> str:
+    """Resolves the path to the Dockerfile.
+
+    Prefers a version-specific one if it exists, otherwise falling back to the
+    legacy Dockerfile.
+    """
+    if self.version != 'legacy':
+      versioned_dockerfile = os.path.join(self.path,
+                                          f'{self.version}.Dockerfile')
+      if os.path.exists(versioned_dockerfile):
+        logging.info('Using versioned Dockerfile: %s', versioned_dockerfile)
+        return versioned_dockerfile
+
+    legacy_dockerfile = os.path.join(self.path, 'Dockerfile')
+    logging.info('Using legacy Dockerfile: %s', legacy_dockerfile)
+    return legacy_dockerfile
+
+  @property
+  def final_tag(self) -> str:
+    """
+    Returns the final tag for the image, using ':v1' for the default
+    version and the version name for others.
+    """
+    return MAJOR_TAG if self.version == DEFAULT_VERSION else self.version
+
   @property
-  def full_image_name(self):
-    return IMAGE_NAME_PREFIX + self.name
-
-
-def get_base_image_path(image_name):
-  """Returns the path to the directory containing the Dockerfile of the base
-  image."""
-  return os.path.join('infra', 'base-images', image_name)
-
-
-BASE_IMAGES = [
-    ImageConfig('base-image'),
-    ImageConfig('base-clang'),
-    ImageConfig('base-clang-full',
-                path=get_base_image_path('base-clang'),
-                build_args=('FULL_LLVM_BUILD=1',)),
-    ImageConfig('indexer', path=os.path.join('infra', 'indexer')),
-    ImageConfig('base-builder'),
-    ImageConfig('base-builder-go'),
-    ImageConfig('base-builder-javascript'),
-    ImageConfig('base-builder-jvm'),
-    ImageConfig('base-builder-python'),
-    ImageConfig('base-builder-ruby'),
-    ImageConfig('base-builder-rust'),
-    ImageConfig('base-builder-swift'),
-    ImageConfig('base-runner'),
-    ImageConfig('base-runner-debug'),
+  def full_image_name_with_tag(self) -> str:
+    """Returns the full GCR image name with the final tag."""
+    return f'{IMAGE_NAME_PREFIX}{self.name}:{self.final_tag}'
+
+
+# Definitions of the base images to be built.
+BASE_IMAGE_DEFS = [
+    {
+        'name': 'base-image'
+    },
+    {
+        'name': 'base-clang'
+    },
+    {
+        'name': 'base-clang-full',
+        'path': 'infra/base-images/base-clang',
+        'build_args': ('FULL_LLVM_BUILD=1',)
+    },
+    {
+        'name': 'indexer'
+    },
+    {
+        'name': 'base-builder'
+    },
+    {
+        'name': 'base-builder-go'
+    },
+    {
+        'name': 'base-builder-javascript'
+    },
+    {
+        'name': 'base-builder-jvm'
+    },
+    {
+        'name': 'base-builder-python'
+    },
+    {
+        'name': 'base-builder-ruby'
+    },
+    {
+        'name': 'base-builder-rust'
+    },
+    {
+        'name': 'base-builder-swift'
+    },
+    {
+        'name': 'base-runner'
+    },
+    {
+        'name': 'base-runner-debug'
+    },
 ]
 
 
 def get_base_image_steps(images: Sequence[ImageConfig]) -> list[dict]:
-  """Returns build steps for given images."""
+  """Returns build steps for a given list of image configurations."""
   steps = [build_lib.get_git_clone_step()]
+  for image_config in images:
+    # The final tag is ':v1' for the default version, or the version name
+    # (e.g., ':ubuntu-24-04') for others.
+    tags = [image_config.full_image_name_with_tag]
+
+    # The 'legacy' build is also tagged as 'latest' for use by subsequent
+    # build steps within the same pipeline.
+    if image_config.version == 'legacy':
+      tags.append(f'{IMAGE_NAME_PREFIX}{image_config.name}:latest')
 
-  for base_image in images:
-    image = base_image.full_image_name
-    tagged_image = image + ':' + MAJOR_TAG
+    dockerfile_path = os.path.join('oss-fuzz', image_config.dockerfile_path)
 
     steps.append(
-        build_lib.get_docker_build_step([image, tagged_image],
-                                        base_image.path,
-                                        build_args=base_image.build_args))
+        build_lib.get_docker_build_step(tags,
+                                        image_config.path,
+                                        dockerfile_path=dockerfile_path,
+                                        build_args=image_config.build_args))
 
   return steps
 
 
-# pylint: disable=no-member
-def run_build(steps, images, tags=None, build_version=MAJOR_TAG):
-  """Execute the build |steps| in GCB and push |images| to the registry."""
+def run_build(steps: list[dict],
+              images_to_push: list[str],
+              build_version: str,
+              tags: list[str] | None = None,
+              dry_run: bool = False,
+              no_push: bool = False):
+  """Executes a build in GCB and pushes the resulting images.
+
+  Alternatively, prints the configuration if in dry_run mode.
+  """
+  if dry_run:
+    print(
+        '--------------------------------------------------------------------')
+    print(f'DRY RUN FOR VERSION: {build_version}')
+    print(
+        '--------------------------------------------------------------------')
+    print(f'Images to push: {images_to_push}')
+    print(f'Push enabled: {not no_push}')
+    print('Build steps:')
+    for step in steps:
+      print(f"  - {step['name']}: {' '.join(step['args'])}")
+    print(
+        '--------------------------------------------------------------------\n'
+    )
+    return
+
+  images_for_gcb = images_to_push
+  if no_push:
+    logging.info('"--no-push" flag detected. Skipping push to registry.')
+    images_for_gcb = []
+
   credentials, _ = google.auth.default()
-  images = [image for image in images if image not in MANIFEST_IMAGES
-           ] + ([f'{image}:{build_version}' for image in images])
   body_overrides = {
-      'images': images,
+      'images': images_for_gcb,
       'options': {
           'machineType': 'E2_HIGHCPU_32'
       },
   }
-  return build_lib.run_build('',
-                             steps,
-                             credentials,
-                             BASE_PROJECT,
-                             TIMEOUT,
-                             body_overrides,
-                             tags,
-                             use_build_pool=False)
-
-
-def get_images_architecture_manifest_steps():
-  """Returns steps to create manifests for ARM and x86_64 versions of
-  base-runner and base-builder."""
+  build_tags = ['base-image-build', f'version-{build_version}']
+  if tags:
+    build_tags.extend(tags)
+
+  build_info = build_lib.run_build('',
+                                   steps,
+                                   credentials,
+                                   BASE_PROJECT,
+                                   TIMEOUT,
+                                   body_overrides,
+                                   build_tags,
+                                   use_build_pool=False)
+
+  if build_info:
+    build_id = build_info.get('id')
+    log_url = build_info.get('logUrl')
+    logging.info('Successfully triggered build %s for version %s.', build_id,
+                 build_version)
+    logging.info('Build logs are available at: %s', log_url)
+  else:
+    logging.error('Failed to trigger build for version %s.', build_version)
+
+
+def get_images_architecture_manifest_steps(target_tag: str) -> list[dict]:
+  """Returns steps for creating and pushing a multi-architecture manifest.
+
+  The manifest is for the base-builder and base-runner images with a
+  specific tag.
+  """
   images = [
       f'{IMAGE_NAME_PREFIX}base-builder', f'{IMAGE_NAME_PREFIX}base-runner'
   ]
   steps = []
   for image in images:
-    steps.extend(get_image_push_architecture_manifest_steps(image))
+    steps.extend(get_image_push_architecture_manifest_steps(image, target_tag))
   return steps
 
 
-def get_image_push_architecture_manifest_steps(image):
-  """Returns the steps to push a manifest pointing to ARM64 and AMD64 versions
-  of |image|."""
-  arm_testing_image = f'{image}-testing-arm'
-  amd64_manifest_image = f'{image}:manifest-amd64'
-  arm64_manifest_image = f'{image}:manifest-arm64v8'
+def get_image_push_architecture_manifest_steps(image: str,
+                                               target_tag: str) -> list[dict]:
+  """Returns steps for pushing a manifest pointing to ARM64/AMD64 versions."""
+  # The AMD64 image is the one we just built.
+  amd64_source_image = f'{image}:{target_tag}'
+  # The ARM64 image is a pre-built generic testing image.
+  arm64_source_image = f'{image}-testing-arm'
+  # The final manifest will point to this tag.
+  manifest_tag = f'{image}:{target_tag}'
+
+  # Intermediate tags for pushing architecture-specific images.
+  amd64_manifest_image = f'{image}:{target_tag}-manifest-amd64'
+  arm64_manifest_image = f'{image}:{target_tag}-manifest-arm64v8'
+
   steps = [
+      # Tag and push the AMD64 image.
       {
           'name': 'gcr.io/cloud-builders/docker',
-          'args': ['tag', image, amd64_manifest_image],
+          'args': ['tag', amd64_source_image, amd64_manifest_image],
       },
       {
           'name': 'gcr.io/cloud-builders/docker',
           'args': ['push', amd64_manifest_image],
       },
+      # Pull and tag the ARM64 image.
       {
           'name': 'gcr.io/cloud-builders/docker',
-          'args': ['pull', arm_testing_image],
+          'args': ['pull', arm64_source_image],
       },
       {
           'name': 'gcr.io/cloud-builders/docker',
-          'args': ['tag', arm_testing_image, arm64_manifest_image],
+          'args': ['tag', arm64_source_image, arm64_manifest_image],
       },
       {
           'name': 'gcr.io/cloud-builders/docker',
          'args': ['push', arm64_manifest_image],
       },
+      # Create and push the manifest.
       {
           'name': 'gcr.io/cloud-builders/docker',
           'args': [
-              'manifest', 'create', image, '--amend', arm64_manifest_image,
-              '--amend', amd64_manifest_image
+              'manifest', 'create', manifest_tag, '--amend',
+              arm64_manifest_image, '--amend', amd64_manifest_image
           ],
       },
       {
           'name': 'gcr.io/cloud-builders/docker',
-          'args': ['manifest', 'push', image]
-      },
+          'args': ['manifest', 'push', manifest_tag]
+      }
   ]
   return steps
 
 
-def base_builder(event, context):
-  """Cloud function to build base images."""
+def base_builder(event, context, dry_run: bool = False, no_push: bool = False):
+  """Cloud function entry point.
+
+  Triggers parallel base image builds for each supported Ubuntu version.
+  """
   del event, context
   logging.basicConfig(level=logging.INFO)
 
-  steps = get_base_image_steps(BASE_IMAGES)
-  steps.extend(get_images_architecture_manifest_steps())
+  for version in SUPPORTED_VERSIONS:
+    logging.info('Starting build for version: %s', version)
+
+    version_images = [
+        ImageConfig(version=version, **def_args) for def_args in BASE_IMAGE_DEFS
+    ]
+    steps = get_base_image_steps(version_images)
+    images_to_push = [img.full_image_name_with_tag for img in version_images]
+
+    # Also push the 'latest' tag for the default build.
+    if version == DEFAULT_VERSION:
+      images_to_push.extend(
+          [f'{IMAGE_NAME_PREFIX}{img.name}:latest' for img in version_images])
+
+    # Determine the final tag for this build.
+    target_tag = MAJOR_TAG if version == DEFAULT_VERSION else version
+
+    # Create a multi-architecture manifest for this version's final tag.
+    logging.info('Adding multi-architecture manifest steps for tag: %s',
+                 target_tag)
+    steps.extend(get_images_architecture_manifest_steps(target_tag))
+    images_to_push.extend([
+        f'{IMAGE_NAME_PREFIX}base-builder:{target_tag}',
+        f'{IMAGE_NAME_PREFIX}base-runner:{target_tag}'
+    ])
+
+    logging.info('Triggering GCB build for version: %s', version)
+    run_build(steps,
+              images_to_push,
+              build_version=version,
+              dry_run=dry_run,
+              no_push=no_push)
+
 
-  images = [base_image.full_image_name for base_image in BASE_IMAGES]
-  run_build(steps, images)
+if __name__ == '__main__':
+  is_dry_run = '--dry-run' in sys.argv
+  no_push = '--no-push' in sys.argv
+  base_builder(None, None, dry_run=is_dry_run, no_push=no_push)
diff --git a/infra/build/functions/build_and_push_test_images.py b/infra/build/functions/build_and_push_test_images.py
index eeaa52b1a333..724686e5ea9a 100755
--- a/infra/build/functions/build_and_push_test_images.py
+++ b/infra/build/functions/build_and_push_test_images.py
@@ -217,7 +217,8 @@ def gcb_build_and_push_images(test_image_tag: str, version_tag: str = None):
   test_image_names = []
   versions = [version_tag] if version_tag else BASE_IMAGE_VERSIONS
   for version in versions:
-    for base_image in base_images.BASE_IMAGES:
+    for base_image_def in base_images.BASE_IMAGE_DEFS:
+      base_image = base_images.ImageConfig(version=version, **base_image_def)
       main_image_name, test_image_name = get_image_tags(base_image.name,
                                                         test_image_tag, version)
       test_image_names.append(test_image_name)
diff --git a/infra/build/functions/build_lib.py b/infra/build/functions/build_lib.py
index 54d1aa8ef1b3..09538e1111ef 100644
--- a/infra/build/functions/build_lib.py
+++ b/infra/build/functions/build_lib.py
@@ -787,10 +787,7 @@ def run_build(  # pylint: disable=too-many-arguments, too-many-locals
 
   build_info = cloudbuild.projects().builds().create(projectId=cloud_project,
                                                      body=build_body).execute()
-
-  build_id = build_info['metadata']['build']['id']
-
-  return build_id
+  return build_info['metadata']['build']
 
 
 def wait_for_build(build_id, credentials, cloud_project):
diff --git a/infra/build/functions/report_generator.py b/infra/build/functions/report_generator.py
index f912dae4d66e..e1626bedf2aa 100644
--- a/infra/build/functions/report_generator.py
+++ b/infra/build/functions/report_generator.py
@@ -131,6 +131,7 @@ def main():
   all_results = {}
   any_failures = False
   any_results_found = False
+  total_unique_projects = set()
 
   print('Generating final build report...')
 
@@ -146,9 +147,13 @@
       any_results_found = True
       if data.get('failed_builds', 0) > 0:
         any_failures = True
+      total_unique_projects.update(data.get('all_projects', []))
 
   if not any_results_found:
-    error_lines = ['All build versions failed to produce results.']
+    error_lines = [
+        'No result files found. This typically means that all upstream builds',
+        'either timed out or failed catastrophically.',
+    ]
     _print_box('FINAL BUILD REPORT', error_lines)
     print('\nPipeline finished with failures.')
     sys.exit(1)
@@ -156,12 +161,19 @@
   generate_comparison_table(all_results)
   generate_final_summary(all_results)
 
-  if any_failures:
+  has_explicit_failures = any_failures
+  no_projects_were_run = any_results_found and not total_unique_projects
+
+  if has_explicit_failures or no_projects_were_run:
+    if no_projects_were_run and not has_explicit_failures:
+      print(
+          '\nWarning: No projects were run. This may indicate an upstream issue.'
+      )
     print('\nPipeline finished with failures.')
     sys.exit(1)
-  else:
-    print('\nPipeline finished successfully.')
-    sys.exit(0)
+
+  print('\nPipeline finished successfully.')
+  sys.exit(0)
 
 
 if __name__ == '__main__':