diff --git a/src/cmd-buildupload b/src/cmd-buildupload index f8ce3a2f6c..cbbdb7305d 100755 --- a/src/cmd-buildupload +++ b/src/cmd-buildupload @@ -38,6 +38,10 @@ def parse_args(): group = parser.add_mutually_exclusive_group() group.add_argument("--skip-builds-json", help="Don't push builds.json", action='store_true') + group.add_argument("--start-build", help="Update builds.json to denote an in-progress build", + action='store_true') + group.add_argument("--fail-build", help="Update builds.json to denote the in-progress build failed", + action='store_true') subparsers = parser.add_subparsers(dest='cmd', title='subcommands') subparsers.required = True @@ -63,18 +67,25 @@ def cmd_upload_s3(args): if args.build == 'latest': args.build = builds.get_latest() print(f"Targeting build: {args.build}") - for arch in builds.get_build_arches(args.build): - s3_upload_build(args, builds.get_build_dir(args.build, arch), - bucket, f'{prefix}/{args.build}/{arch}') - # if there's anything else in the build dir, just upload it too, - # e.g. pipelines might inject additional metadata - for f in os.listdir(f'builds/{args.build}'): - # arches already uploaded higher up - if f in builds.get_build_arches(args.build): - continue - # assume it's metadata - s3_copy(f'builds/{args.build}/{f}', bucket, f'{prefix}/{args.build}/{f}', - CACHE_MAX_AGE_METADATA, args.acl) + if args.start_build: + builds.set_build_in_progress(args.build) + builds.flush() + elif args.fail_build: + builds.set_build_failed(args.build) + builds.flush() + else: + for arch in builds.get_build_arches(args.build): + s3_upload_build(args, builds.get_build_dir(args.build, arch), + bucket, f'{prefix}/{args.build}/{arch}') + # if there's anything else in the build dir, just upload it too, + # e.g. 
pipelines might inject additional metadata + for f in os.listdir(f'builds/{args.build}'): + # arches already uploaded higher up + if f in builds.get_build_arches(args.build): + continue + # assume it's metadata + s3_copy(f'builds/{args.build}/{f}', bucket, f'{prefix}/{args.build}/{f}', + CACHE_MAX_AGE_METADATA, args.acl) if not args.skip_builds_json: s3_copy(BUILDFILES['list'], bucket, f'{prefix}/builds.json', CACHE_MAX_AGE_METADATA, args.acl, extra_args={}, dry_run=args.dry_run) diff --git a/src/cosalib/builds.py b/src/cosalib/builds.py index e6c1be7c69..b8c6a197eb 100644 --- a/src/cosalib/builds.py +++ b/src/cosalib/builds.py @@ -134,6 +134,28 @@ def bump_timestamp(self): self._data['timestamp'] = rfc3339_time() self.flush() + def set_build_in_progress(self, build_id): + data = { 'id': build_id } + # Pipelines should set this to some sort of "unique" identifier + # for this build. Something like --- + # For the FCOS pipeline, a good identifier would be the + # name of the pod that the Jenkins Kubernetes plugin creates. + pipeline_instance_data = os.environ.get('COSA_PIPELINE_INSTANCE_ID') + if pipeline_instance_data: + data['pipeline'] = pipeline_instance_data + self._data['build-started'] = data + + def set_build_failed(self, build_id): + started = self._data.get('build-started') + if started is None: + raise Exception(f"Tried to fail build {build_id}," + " but no build-started key present in builds.json") + if started['id'] != build_id: + raise Exception(f"Tried to fail build {build_id}," + f" but active build is {started['id']}") + self._data['build-failed'] = started + del self._data['build-started'] + def raw(self): return self._data