From a1e3911638be3c245e920e70394b19ad1a160091 Mon Sep 17 00:00:00 2001 From: Dave Protasowski Date: Thu, 21 Feb 2019 10:39:20 -0500 Subject: [PATCH 1/3] extract out an envoy_http_archive rule this is now used by the api package Signed-off-by: Dave Protasowski --- api/bazel/envoy_http_archive.bzl | 24 ++++++++ api/bazel/repositories.bzl | 96 +++++++++++------------------- api/bazel/repository_locations.bzl | 51 ++++++++++++++++ bazel/repositories.bzl | 31 ++-------- 4 files changed, 115 insertions(+), 87 deletions(-) create mode 100644 api/bazel/envoy_http_archive.bzl create mode 100644 api/bazel/repository_locations.bzl diff --git a/api/bazel/envoy_http_archive.bzl b/api/bazel/envoy_http_archive.bzl new file mode 100644 index 0000000000000..30207282847c7 --- /dev/null +++ b/api/bazel/envoy_http_archive.bzl @@ -0,0 +1,24 @@ +load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") + +def envoy_http_archive(name, locations, **kwargs): + # `existing_rule_keys` contains the names of repositories that have already + # been defined in the Bazel workspace. By skipping repos with existing keys, + # users can override dependency versions by using standard Bazel repository + # rules in their WORKSPACE files. + existing_rule_keys = native.existing_rules().keys() + if name in existing_rule_keys: + # This repository has already been defined, probably because the user + # wants to override the version. Do nothing. + return + + loc_key = kwargs.pop("repository_key", name) + location = locations[loc_key] + + # HTTP tarball at a given URL. Add a BUILD file if requested. 
+ http_archive( + name = name, + urls = location["urls"], + sha256 = location["sha256"], + strip_prefix = location.get("strip_prefix", ""), + **kwargs + ) diff --git a/api/bazel/repositories.bzl b/api/bazel/repositories.bzl index e524fcd14d8bf..f7ae937e642fa 100644 --- a/api/bazel/repositories.bzl +++ b/api/bazel/repositories.bzl @@ -1,43 +1,38 @@ -BAZEL_SKYLIB_RELEASE = "0.6.0" -BAZEL_SKYLIB_SHA256 = "eb5c57e4c12e68c0c20bc774bfbc60a568e800d025557bc4ea022c6479acc867" - -GOGOPROTO_RELEASE = "1.2.0" -GOGOPROTO_SHA256 = "957c8f03cf595525d2a667035d9865a0930b3d446be0ab6eb76972934f925b00" - -OPENCENSUS_RELEASE = "0.1.0" -OPENCENSUS_SHA256 = "4fd21cc6de63d7cb979fd749d8101ff425905aa0826fed26019d1c311fcf19a7" - -PGV_RELEASE = "0.0.13" -PGV_SHA256 = "dce6c8a43849d2abe4d5e40f16e9a476bca6b7a47e128db4458a52d748f4a5eb" - -GOOGLEAPIS_GIT_SHA = "d642131a6e6582fc226caf9893cb7fe7885b3411" # May 23, 2018 -GOOGLEAPIS_SHA = "16f5b2e8bf1e747a32f9a62e211f8f33c94645492e9bbd72458061d9a9de1f63" - -PROMETHEUS_GIT_SHA = "99fa1f4be8e564e8a6b613da7fa6f46c9edafc6c" # Nov 17, 2017 -PROMETHEUS_SHA = "783bdaf8ee0464b35ec0c8704871e1e72afa0005c3f3587f65d9d6694bf3911b" - load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") +load(":envoy_http_archive.bzl", "envoy_http_archive") +load(":repository_locations.bzl", "REPOSITORY_LOCATIONS") def api_dependencies(): - http_archive( - name = "bazel_skylib", - sha256 = BAZEL_SKYLIB_SHA256, - strip_prefix = "bazel-skylib-" + BAZEL_SKYLIB_RELEASE, - url = "https://github.com/bazelbuild/bazel-skylib/archive/" + BAZEL_SKYLIB_RELEASE + ".tar.gz", + envoy_http_archive( + "bazel_skylib", + locations = REPOSITORY_LOCATIONS, ) - http_archive( - name = "com_lyft_protoc_gen_validate", - sha256 = PGV_SHA256, - strip_prefix = "protoc-gen-validate-" + PGV_RELEASE, - url = "https://github.com/lyft/protoc-gen-validate/archive/v" + PGV_RELEASE + ".tar.gz", + envoy_http_archive( + "com_lyft_protoc_gen_validate", + locations = REPOSITORY_LOCATIONS, ) - 
http_archive( + envoy_http_archive( name = "googleapis", - strip_prefix = "googleapis-" + GOOGLEAPIS_GIT_SHA, - url = "https://github.com/googleapis/googleapis/archive/" + GOOGLEAPIS_GIT_SHA + ".tar.gz", - # TODO(dio): Consider writing a Skylark macro for importing Google API proto. - sha256 = GOOGLEAPIS_SHA, - build_file_content = """ + locations = REPOSITORY_LOCATIONS, + build_file_content = GOOGLEAPIS_BUILD_CONTENT, + ) + envoy_http_archive( + name = "com_github_gogo_protobuf", + locations = REPOSITORY_LOCATIONS, + build_file_content = GOGOPROTO_BUILD_CONTENT, + ) + envoy_http_archive( + name = "prometheus_metrics_model", + locations = REPOSITORY_LOCATIONS, + build_file_content = PROMETHEUSMETRICS_BUILD_CONTENT, + ) + envoy_http_archive( + name = "io_opencensus_trace", + locations = REPOSITORY_LOCATIONS, + build_file_content = OPENCENSUSTRACE_BUILD_CONTENT, + ) + +GOOGLEAPIS_BUILD_CONTENT = """ load("@com_google_protobuf//:protobuf.bzl", "cc_proto_library", "py_proto_library") load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library") @@ -194,15 +189,9 @@ py_proto_library( visibility = ["//visibility:public"], deps = ["@com_google_protobuf//:protobuf_python"], ) -""", - ) +""" - http_archive( - name = "com_github_gogo_protobuf", - sha256 = GOGOPROTO_SHA256, - strip_prefix = "protobuf-" + GOGOPROTO_RELEASE, - url = "https://github.com/gogo/protobuf/archive/v" + GOGOPROTO_RELEASE + ".tar.gz", - build_file_content = """ +GOGOPROTO_BUILD_CONTENT = """ load("@com_google_protobuf//:protobuf.bzl", "cc_proto_library", "py_proto_library") load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library") @@ -255,15 +244,9 @@ py_proto_library( visibility = ["//visibility:public"], deps = ["@com_google_protobuf//:protobuf_python"], ) - """, - ) +""" - http_archive( - name = "prometheus_metrics_model", - strip_prefix = "client_model-" + PROMETHEUS_GIT_SHA, - url = "https://github.com/prometheus/client_model/archive/" + PROMETHEUS_GIT_SHA + ".tar.gz", - sha256 = 
PROMETHEUS_SHA, - build_file_content = """ +PROMETHEUSMETRICS_BUILD_CONTENT = """ load("@envoy_api//bazel:api_build_system.bzl", "api_proto_library") load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library") @@ -281,15 +264,9 @@ go_proto_library( proto = ":client_model", visibility = ["//visibility:public"], ) - """, - ) +""" - http_archive( - name = "io_opencensus_trace", - sha256 = OPENCENSUS_SHA256, - strip_prefix = "opencensus-proto-" + OPENCENSUS_RELEASE + "/src/opencensus/proto/trace/v1", - url = "https://github.com/census-instrumentation/opencensus-proto/archive/v" + OPENCENSUS_RELEASE + ".tar.gz", - build_file_content = """ +OPENCENSUSTRACE_BUILD_CONTENT = """ load("@envoy_api//bazel:api_build_system.bzl", "api_proto_library") load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library") @@ -307,5 +284,4 @@ go_proto_library( proto = ":trace_model", visibility = ["//visibility:public"], ) - """, - ) +""" diff --git a/api/bazel/repository_locations.bzl b/api/bazel/repository_locations.bzl new file mode 100644 index 0000000000000..29ca98ac3b731 --- /dev/null +++ b/api/bazel/repository_locations.bzl @@ -0,0 +1,51 @@ +BAZEL_SKYLIB_RELEASE = "0.6.0" +BAZEL_SKYLIB_SHA256 = "eb5c57e4c12e68c0c20bc774bfbc60a568e800d025557bc4ea022c6479acc867" + +GOGOPROTO_RELEASE = "1.2.0" +GOGOPROTO_SHA256 = "957c8f03cf595525d2a667035d9865a0930b3d446be0ab6eb76972934f925b00" + +OPENCENSUS_RELEASE = "0.1.0" +OPENCENSUS_SHA256 = "4fd21cc6de63d7cb979fd749d8101ff425905aa0826fed26019d1c311fcf19a7" + +PGV_RELEASE = "0.0.13" +PGV_SHA256 = "dce6c8a43849d2abe4d5e40f16e9a476bca6b7a47e128db4458a52d748f4a5eb" + +GOOGLEAPIS_GIT_SHA = "d642131a6e6582fc226caf9893cb7fe7885b3411" # May 23, 2018 +GOOGLEAPIS_SHA = "16f5b2e8bf1e747a32f9a62e211f8f33c94645492e9bbd72458061d9a9de1f63" + +PROMETHEUS_GIT_SHA = "99fa1f4be8e564e8a6b613da7fa6f46c9edafc6c" # Nov 17, 2017 +PROMETHEUS_SHA = "783bdaf8ee0464b35ec0c8704871e1e72afa0005c3f3587f65d9d6694bf3911b" + +REPOSITORY_LOCATIONS = dict( + bazel_skylib = dict( + 
sha256 = BAZEL_SKYLIB_SHA256, + strip_prefix = "bazel-skylib-" + BAZEL_SKYLIB_RELEASE, + urls = ["https://github.com/bazelbuild/bazel-skylib/archive/" + BAZEL_SKYLIB_RELEASE + ".tar.gz"], + ), + com_lyft_protoc_gen_validate = dict( + sha256 = PGV_SHA256, + strip_prefix = "protoc-gen-validate-" + PGV_RELEASE, + urls = ["https://github.com/lyft/protoc-gen-validate/archive/v" + PGV_RELEASE + ".tar.gz"], + ), + googleapis = dict( + # TODO(dio): Consider writing a Skylark macro for importing Google API proto. + sha256 = GOOGLEAPIS_SHA, + strip_prefix = "googleapis-" + GOOGLEAPIS_GIT_SHA, + urls = ["https://github.com/googleapis/googleapis/archive/" + GOOGLEAPIS_GIT_SHA + ".tar.gz"], + ), + com_github_gogo_protobuf = dict( + sha256 = GOGOPROTO_SHA256, + strip_prefix = "protobuf-" + GOGOPROTO_RELEASE, + urls = ["https://github.com/gogo/protobuf/archive/v" + GOGOPROTO_RELEASE + ".tar.gz"], + ), + prometheus_metrics_model = dict( + sha256 = PROMETHEUS_SHA, + strip_prefix = "client_model-" + PROMETHEUS_GIT_SHA, + urls = ["https://github.com/prometheus/client_model/archive/" + PROMETHEUS_GIT_SHA + ".tar.gz"], + ), + io_opencensus_trace = dict( + sha256 = OPENCENSUS_SHA256, + strip_prefix = "opencensus-proto-" + OPENCENSUS_RELEASE + "/src/opencensus/proto/trace/v1", + urls = ["https://github.com/census-instrumentation/opencensus-proto/archive/v" + OPENCENSUS_RELEASE + ".tar.gz"], + ), +) diff --git a/bazel/repositories.bzl b/bazel/repositories.bzl index 36b04de9c23f0..54045d18ce9a5 100644 --- a/bazel/repositories.bzl +++ b/bazel/repositories.bzl @@ -1,5 +1,6 @@ load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") load(":genrule_repository.bzl", "genrule_repository") +load("//api/bazel:envoy_http_archive.bzl", "envoy_http_archive") load(":repository_locations.bzl", "REPOSITORY_LOCATIONS") load(":target_recipes.bzl", "TARGET_RECIPES") load( @@ -20,33 +21,9 @@ GO_VERSION = "1.10.4" BUILD_ALL_CONTENT = """filegroup(name = "all", srcs = glob(["**"]), visibility = 
["//visibility:public"])""" def _repository_impl(name, **kwargs): - # `existing_rule_keys` contains the names of repositories that have already - # been defined in the Bazel workspace. By skipping repos with existing keys, - # users can override dependency versions by using standard Bazel repository - # rules in their WORKSPACE files. - existing_rule_keys = native.existing_rules().keys() - if name in existing_rule_keys: - # This repository has already been defined, probably because the user - # wants to override the version. Do nothing. - return - - loc_key = kwargs.pop("repository_key", name) - location = REPOSITORY_LOCATIONS[loc_key] - - # Git tags are mutable. We want to depend on commit IDs instead. Give the - # user a useful error if they accidentally specify a tag. - if "tag" in location: - fail( - "Refusing to depend on Git tag %r for external dependency %r: use 'commit' instead." % - (location["tag"], name), - ) - - # HTTP tarball at a given URL. Add a BUILD file if requested. - http_archive( - name = name, - urls = location["urls"], - sha256 = location["sha256"], - strip_prefix = location.get("strip_prefix", ""), + envoy_http_archive( + name, + locations = REPOSITORY_LOCATIONS, **kwargs ) From 70af22a0fc680d55b0637c413bea785823533b3f Mon Sep 17 00:00:00 2001 From: Dave Protasowski Date: Thu, 21 Feb 2019 17:03:25 -0500 Subject: [PATCH 2/3] rework build recipes This will enable extracting out version details Signed-off-by: Dave Protasowski --- ci/build_container/Dockerfile-ubuntu | 2 +- .../build_recipes/gperftools.sh | 18 +++---- ci/build_container/build_recipes/luajit.sh | 19 ++++--- ci/build_container/build_recipes/versions.py | 51 +++++++++++++++++++ 4 files changed, 73 insertions(+), 17 deletions(-) create mode 100755 ci/build_container/build_recipes/versions.py diff --git a/ci/build_container/Dockerfile-ubuntu b/ci/build_container/Dockerfile-ubuntu index 2d1cf1896736a..2a0ed6355ebe8 100644 --- a/ci/build_container/Dockerfile-ubuntu +++ 
b/ci/build_container/Dockerfile-ubuntu @@ -4,7 +4,7 @@ COPY ./build_and_install_deps.sh ./recipe_wrapper.sh ./Makefile ./build_containe COPY WORKSPACE /bazel-prebuilt/ COPY ./api /bazel-prebuilt/api COPY ./bazel /bazel-prebuilt/bazel -COPY ./build_recipes/*.sh /build_recipes/ +COPY ./build_recipes /build_recipes COPY ./build_container_ubuntu.sh / diff --git a/ci/build_container/build_recipes/gperftools.sh b/ci/build_container/build_recipes/gperftools.sh index 198af0e0494a2..0e6aa16236fb3 100755 --- a/ci/build_container/build_recipes/gperftools.sh +++ b/ci/build_container/build_recipes/gperftools.sh @@ -6,17 +6,17 @@ if [[ "${OS}" == "Windows_NT" ]]; then exit 0 fi -# TODO(cmluciano): Bump to release 2.8 -# This sha is specifically chosen to fix ppc64le builds that require inclusion -# of asm/ptrace.h -VERSION=fc00474ddc21fff618fc3f009b46590e241e425e -SHA256=18574813a062eee487bc1b761e8024a346075a7cb93da19607af362dc09565ef +SCRIPT_DIR="$(dirname "${BASH_SOURCE[0]}")" -curl https://github.com/gperftools/gperftools/archive/${VERSION}.tar.gz -sLo gperftools-"$VERSION".tar.gz \ - && echo "$SHA256" gperftools-"$VERSION".tar.gz | sha256sum --check +$($SCRIPT_DIR/versions.py gperftools) -tar xf gperftools-"$VERSION".tar.gz -cd gperftools-"${VERSION}" +FILE_NAME=$(basename "$FILE_URL") + +curl "$FILE_URL" -sLo "$FILE_NAME" \ + && echo "$FILE_SHA256" "$FILE_NAME" | sha256sum --check +tar xf "$FILE_NAME" + +cd "$FILE_PREFIX" ./autogen.sh diff --git a/ci/build_container/build_recipes/luajit.sh b/ci/build_container/build_recipes/luajit.sh index 0484631212bcd..00546892f29b6 100644 --- a/ci/build_container/build_recipes/luajit.sh +++ b/ci/build_container/build_recipes/luajit.sh @@ -2,15 +2,21 @@ set -e -VERSION=2.1.0-beta3 -SHA256=409f7fe570d3c16558e594421c47bdd130238323c9d6fd6c83dedd2aaeb082a8 if [[ "${OS}" == "Windows_NT" ]]; then exit 0 fi -curl https://github.com/LuaJIT/LuaJIT/archive/v"$VERSION".tar.gz -sLo LuaJIT-"$VERSION".tar.gz \ - && echo "$SHA256" 
LuaJIT-"$VERSION".tar.gz | sha256sum --check -tar xf LuaJIT-"$VERSION".tar.gz +SCRIPT_DIR="$(dirname "${BASH_SOURCE[0]}")" + +$($SCRIPT_DIR/versions.py luajit) + +FILE_NAME=$(basename "$FILE_URL") + +curl "$FILE_URL" -sLo "$FILE_NAME" \ + && echo "$FILE_SHA256" "$FILE_NAME" | sha256sum --check +tar xf "$FILE_NAME" + +cd "$FILE_PREFIX" # Fixup Makefile with things that cannot be set via env var. cat > luajit_make.diff << 'EOF' @@ -60,8 +66,7 @@ index f56465d..3f4f2fa 100644 ############################################################################## EOF -cd LuaJIT-"$VERSION" -patch -p1 < ../luajit_make.diff +patch -p1 < luajit_make.diff # Default MACOSX_DEPLOYMENT_TARGET is 10.4, which will fail the build at link time on macOS 10.14: # ld: library not found for -lgcc_s.10.4 diff --git a/ci/build_container/build_recipes/versions.py b/ci/build_container/build_recipes/versions.py new file mode 100755 index 0000000000000..1dc60d3adc853 --- /dev/null +++ b/ci/build_container/build_recipes/versions.py @@ -0,0 +1,51 @@ +#!/usr/bin/env python + +import sys + +LUAJIT_VERSION = '2.1.0-beta3' +LUAJIT_FILE_URL = 'https://github.com/LuaJIT/LuaJIT/archive/v' + LUAJIT_VERSION + '.tar.gz' +LUAJIT_FILE_SHA256 = '409f7fe570d3c16558e594421c47bdd130238323c9d6fd6c83dedd2aaeb082a8' +LUAJIT_FILE_PREFIX = 'LuaJIT-' + LUAJIT_VERSION + +# TODO(cmluciano): Bump to release 2.8 +# This sha is specifically chosen to fix ppc64le builds that require inclusion +# of asm/ptrace.h +GPERFTOOLS_VERSION = 'fc00474ddc21fff618fc3f009b46590e241e425e' +GPERFTOOLS_FILE_URL = 'https://github.com/gperftools/gperftools/archive/' + GPERFTOOLS_VERSION + '.tar.gz' +GPERFTOOLS_FILE_SHA256 = '18574813a062eee487bc1b761e8024a346075a7cb93da19607af362dc09565ef' +GPERFTOOLS_FILE_PREFIX = 'gperftools-' + GPERFTOOLS_VERSION + +RECIPES = dict( + luajit=dict( + version=LUAJIT_VERSION, + url=LUAJIT_FILE_URL, + sha256=LUAJIT_FILE_SHA256, + strip_prefix=LUAJIT_FILE_PREFIX, + ), + gperftools=dict( + version=GPERFTOOLS_VERSION, 
+ url=GPERFTOOLS_FILE_URL, + sha256=GPERFTOOLS_FILE_SHA256, + strip_prefix=GPERFTOOLS_FILE_PREFIX, + )) + +if __name__ == '__main__': + if len(sys.argv) != 2: + print('Usage: %s <recipe>' % sys.argv[0]) + sys.exit(1) + name = sys.argv[1] + if name not in RECIPES: + print('Unknown recipe: %s' % name) + sys.exit(1) + recipe = RECIPES[name] + print(""" + export VERSION={} + export FILE_URL={} + export FILE_SHA256={} + export FILE_PREFIX={} + """.format( + recipe['version'], + recipe['url'], + recipe['sha256'], + recipe['strip_prefix'], + )) From 72f24f65cf70a77153fdcf36510237a579789221 Mon Sep 17 00:00:00 2001 From: Dave Protasowski Date: Thu, 21 Feb 2019 17:13:20 -0500 Subject: [PATCH 3/3] create a python script to print dependency details Signed-off-by: Dave Protasowski --- .gitignore | 1 + bazel/git_repository_info.py | 22 ---------------------- tools/print_dependencies.py | 36 ++++++++++++++++++++++++++++++++++++ 3 files changed, 37 insertions(+), 22 deletions(-) delete mode 100755 bazel/git_repository_info.py create mode 100755 tools/print_dependencies.py diff --git a/.gitignore b/.gitignore index e28b33d6b7f0f..8b0defaf147a1 100644 --- a/.gitignore +++ b/.gitignore @@ -2,6 +2,7 @@ BROWSE /build /build_* +*.bzlc .cache /ci/bazel-* /ci/prebuilt/thirdparty diff --git a/bazel/git_repository_info.py b/bazel/git_repository_info.py deleted file mode 100755 index 39b61fc0fb9e2..0000000000000 --- a/bazel/git_repository_info.py +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env python - -# Quick-and-dirty Python to fetch git repository info in bazel/repository_locations.bzl.
- -from __future__ import print_function - -import imp -import sys -import subprocess as sp - -repolocs = imp.load_source('replocs', 'bazel/repository_locations.bzl') - -if __name__ == '__main__': - if len(sys.argv) != 2: - print('Usage: %s ' % sys.argv[0]) - sys.exit(1) - repo = sys.argv[1] - if repo not in repolocs.REPOSITORY_LOCATIONS: - print('Unknown repository: %s' % repo) - sys.exit(1) - repoloc = repolocs.REPOSITORY_LOCATIONS[repo] - print('%s %s' % (repoloc['remote'], repoloc['commit'])) diff --git a/tools/print_dependencies.py b/tools/print_dependencies.py new file mode 100755 index 0000000000000..48ee61b59d056 --- /dev/null +++ b/tools/print_dependencies.py @@ -0,0 +1,36 @@ +#!/usr/bin/env python + +# Quick-and-dirty python to fetch dependency information + +import imp +import json +import os.path +import re + +API_DEPS = imp.load_source('api', 'api/bazel/repository_locations.bzl') +DEPS = imp.load_source('deps', 'bazel/repository_locations.bzl') +RECIPE_INFO = imp.load_source('recipes', 'ci/build_container/build_recipes/versions.py') + +if __name__ == '__main__': + deps = [] + + DEPS.REPOSITORY_LOCATIONS.update(API_DEPS.REPOSITORY_LOCATIONS) + + for key, loc in DEPS.REPOSITORY_LOCATIONS.items(): + deps.append({ + 'identifier': key, + 'file-sha256': loc.get('sha256'), + 'file-url': loc.get('urls')[0], + 'file-prefix': loc.get('strip_prefix', ''), + }) + + for key, loc in RECIPE_INFO.RECIPES.items(): + deps.append({ + 'identifier': key, + 'file-sha256': loc.get('sha256'), + 'file-url': loc.get('url'), + 'file-prefix': loc.get('strip_prefix', ''), + }) + + deps = sorted(deps, key=lambda k: k['identifier']) + print json.dumps(deps, sort_keys=True, indent=2)