Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
35 changes: 23 additions & 12 deletions src/cmd-buildupload
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,10 @@ def parse_args():
group = parser.add_mutually_exclusive_group()
group.add_argument("--skip-builds-json", help="Don't push builds.json",
action='store_true')
group.add_argument("--start-build", help="Update builds.json to denote an in-progress build",
action='store_true')
group.add_argument("--fail-build", help="Update builds.json to denote the in-progress build failed",
action='store_true')

subparsers = parser.add_subparsers(dest='cmd', title='subcommands')
subparsers.required = True
Expand All @@ -63,18 +67,25 @@ def cmd_upload_s3(args):
if args.build == 'latest':
args.build = builds.get_latest()
print(f"Targeting build: {args.build}")
for arch in builds.get_build_arches(args.build):
s3_upload_build(args, builds.get_build_dir(args.build, arch),
bucket, f'{prefix}/{args.build}/{arch}')
# if there's anything else in the build dir, just upload it too,
# e.g. pipelines might inject additional metadata
for f in os.listdir(f'builds/{args.build}'):
# arches already uploaded higher up
if f in builds.get_build_arches(args.build):
continue
# assume it's metadata
s3_copy(f'builds/{args.build}/{f}', bucket, f'{prefix}/{args.build}/{f}',
CACHE_MAX_AGE_METADATA, args.acl)
if args.start_build:
builds.set_build_in_progress(args.build)
builds.flush()
elif args.fail_build:
builds.set_build_failed(args.build)
builds.flush()
else:
for arch in builds.get_build_arches(args.build):
s3_upload_build(args, builds.get_build_dir(args.build, arch),
bucket, f'{prefix}/{args.build}/{arch}')
# if there's anything else in the build dir, just upload it too,
# e.g. pipelines might inject additional metadata
for f in os.listdir(f'builds/{args.build}'):
# arches already uploaded higher up
if f in builds.get_build_arches(args.build):
continue
# assume it's metadata
s3_copy(f'builds/{args.build}/{f}', bucket, f'{prefix}/{args.build}/{f}',
CACHE_MAX_AGE_METADATA, args.acl)
if not args.skip_builds_json:
s3_copy(BUILDFILES['list'], bucket, f'{prefix}/builds.json',
CACHE_MAX_AGE_METADATA, args.acl, extra_args={}, dry_run=args.dry_run)
Expand Down
22 changes: 22 additions & 0 deletions src/cosalib/builds.py
Original file line number Diff line number Diff line change
Expand Up @@ -134,6 +134,28 @@ def bump_timestamp(self):
self._data['timestamp'] = rfc3339_time()
self.flush()

def set_build_in_progress(self, build_id):
    """Record in builds.json that build `build_id` is currently underway.

    Only updates the in-memory data; the caller is responsible for
    flushing builds.json to disk.
    """
    entry = {'id': build_id}
    # Pipelines should set this to some sort of "unique" identifier
    # for this build. Something like <HOST>-<JOB>-<BUILDID>-<ARCH>
    # For the FCOS pipeline, a good identifier would be the
    # name of the pod that the Jenkins Kubernetes plugin creates.
    instance_id = os.environ.get('COSA_PIPELINE_INSTANCE_ID')
    if instance_id:
        entry['pipeline'] = instance_id
    self._data['build-started'] = entry

def set_build_failed(self, build_id):
    """Mark the in-progress build `build_id` as failed in builds.json.

    Moves the 'build-started' entry to 'build-failed'. Raises if no
    build is in progress, or if the in-progress build has a different
    ID than `build_id`. Only updates the in-memory data; the caller is
    responsible for flushing builds.json to disk.
    """
    started = self._data.get('build-started')
    if started is None:
        # Fix: these messages were missing the f-prefix, so the literal
        # text '{build_id}' was printed instead of the actual ID.
        raise Exception(f"Tried to fail build {build_id},"
                        " but no build-started key present in builds.json")
    if started['id'] != build_id:
        raise Exception(f"Tried to fail build {build_id},"
                        f" but active build is {started['id']}")
    self._data['build-failed'] = started
    del self._data['build-started']

def raw(self):
    """Return the underlying builds.json data dict.

    This is the live object, not a copy — mutations by the caller
    will be reflected in this instance's state.
    """
    return self._data

Expand Down