From 792f12b616ea67822064b64febaf43479a890195 Mon Sep 17 00:00:00 2001 From: Duo Zhang Date: Tue, 25 Oct 2022 21:41:53 +0800 Subject: [PATCH 1/9] HBASE-25983 javadoc generation fails on openjdk-11.0.11+9 (#4837) Signed-off-by: Nick Dimiduk --- pom.xml | 59 +++++++++++++++++++++++++++++++++++++++++++-------------- 1 file changed, 45 insertions(+), 14 deletions(-) diff --git a/pom.xml b/pom.xml index bca2e41958ba..c9f74cde8b03 100644 --- a/pom.xml +++ b/pom.xml @@ -782,7 +782,12 @@ comes in via hbase-thirdparty hbase-shaded-netty--> 3.10.5.Final - 0.5.0 + 0.13.0 + + 0.13.0 1.11.0 2.8.1 1.13 @@ -933,8 +938,7 @@ this parameter by invoking mvn with -Dbuild.id=$BUILD_ID--> ${maven.build.timestamp} bash - + none @@ -1767,8 +1771,6 @@ org.apache.maven.plugins maven-compiler-plugin - ${compileSource} - ${compileSource} true false false @@ -3010,7 +3012,7 @@ org.apache.yetus audience-annotations - ${audience-annotations.version} + ${javadoc.audience-annotations.version} true apidocs @@ -3067,7 +3069,7 @@ org.apache.yetus audience-annotations - ${audience-annotations.version} + ${javadoc.audience-annotations.version} true testapidocs @@ -3157,18 +3159,17 @@ build-with-jdk8 - [1.8,) + 1.8 - - - - - + + ${compileSource} + ${compileSource} + build-with-jdk11 - [1.11,) + [11,) ${releaseTarget} @@ -3188,7 +3189,37 @@ TODO: replicate logic for windows --> 2200m + + 0.14.1 + + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven.javadoc.version} + + ${compileSource} + + --ignore-source-errors + + -J-Xmx2G + -J--add-exports + -Jjdk.javadoc/jdk.javadoc.internal.tool=ALL-UNNAMED + + + + + + From 5e251c49d01a45c177d17bac7ee4e923d2bd2df7 Mon Sep 17 00:00:00 2001 From: Nick Dimiduk Date: Tue, 25 Oct 2022 18:18:26 +0200 Subject: [PATCH 2/9] HBASE-27442 Change the way of bumping version in release scripts Signed-off-by: Duo Zhang --- dev-support/create-release/release-util.sh | 34 ++++++++++++++++++++-- 1 file changed, 32 insertions(+), 2 deletions(-) diff --git a/dev-support/create-release/release-util.sh b/dev-support/create-release/release-util.sh index 6241a8538a08..fdb078810cef 100755 --- a/dev-support/create-release/release-util.sh +++ b/dev-support/create-release/release-util.sh @@ -685,8 +685,38 @@ function kick_gpg_agent { # Do maven command to set version into local pom function maven_set_version { #input: local this_version="$1" - log "${MVN[@]}" versions:set -DnewVersion="$this_version" - "${MVN[@]}" versions:set -DnewVersion="$this_version" | grep -v "no value" # silence logs + local use_revision='false' + local -a version_splits=() + IFS='.' read -ar version_splits <<< "$(maven_get_version)" + + # Do the right thing based on project and release line. 
+ if [ "${PROJECT}" = 'hbase' ] ; then + if [ "${version_splits[0]}" -le 1 ] ; then + use_revision='false' + elif [ "${version_splits[0]}" -eq 2 ] && [ "${version_splits[1]}" -le 4 ] ; then + use_revision='false' + elif [ "${version_splits[0]}" -eq 2 ] && [ "${version_splits[1]}" -ge 5 ] ; then + use_revision='true' + elif [ "${version_splits[0]}" -ge 3 ] ; then + use_revision='true' + fi + elif [ "${PROJECT}" = 'hbase-thirdparty' ] ; then + use_revision='false' + elif [ "${PROJECT}" = 'hbase-connectors' ] ; then + use_revision='true' + elif [ "${PROJECT}" = 'hbase-filesystem' ] ; then + use_revision='false' + elif [ "${PROJECT}" = 'hbase-operator-tools' ] ; then + use_revision='false' + fi + + if [ "${use_revision}" = 'false' ] ; then + log "${MVN[@]}" versions:set -DnewVersion="$this_version" + "${MVN[@]}" versions:set -DnewVersion="$this_version" | grep -v "no value" # silence logs + else + log "${MVN[@]}" versions:set-property -Dproperty=revision -DnewVersion="$this_version" -DgenerateBackupPoms=false + "${MVN[@]}" versions:set-property -Dproperty=revision -DnewVersion="$this_version" -DgenerateBackupPoms=false | grep -v "no value" # silence logs + fi } # Do maven command to read version from local pom From d9713bf62147fcdd6f0b09df3ff48171ef1878d5 Mon Sep 17 00:00:00 2001 From: Jonathan Albrecht Date: Fri, 28 Oct 2022 00:31:51 -0400 Subject: [PATCH 3/9] HBASE-27167 s390x: Skip tests on unsupported compression libs (#4852) Signed-off-by: Duo Zhang --- .../hbase-compression-aircompressor/pom.xml | 12 ++++++++++++ hbase-compression/hbase-compression-brotli/pom.xml | 12 ++++++++++++ 2 files changed, 24 insertions(+) diff --git a/hbase-compression/hbase-compression-aircompressor/pom.xml b/hbase-compression/hbase-compression-aircompressor/pom.xml index 2ec2403ae9aa..a03930977ba6 100644 --- a/hbase-compression/hbase-compression-aircompressor/pom.xml +++ b/hbase-compression/hbase-compression-aircompressor/pom.xml @@ -178,5 +178,17 @@ + + + s390x-aircompressor-skip-tests + + + s390x + + + + true + + diff --git a/hbase-compression/hbase-compression-brotli/pom.xml b/hbase-compression/hbase-compression-brotli/pom.xml index f5effc8b2101..f740a7cf2ddd 100644 --- a/hbase-compression/hbase-compression-brotli/pom.xml +++ b/hbase-compression/hbase-compression-brotli/pom.xml @@ -163,5 +163,17 @@ + + + s390x-brotli-skip-tests + + + s390x + + + + true + + From d80053641d09caecf34cc878b8789bad30ad273e Mon Sep 17 00:00:00 2001 From: Duo Zhang Date: Fri, 28 Oct 2022 18:40:40 +0800 Subject: [PATCH 4/9] HBASE-27446 Spotbugs 4.7.2 report a lot of logging errors when generating report (#4848) Signed-off-by: Guanghao Zhang --- dev-support/docker/Dockerfile | 4 ++-- pom.xml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/dev-support/docker/Dockerfile b/dev-support/docker/Dockerfile index c0e1592549ea..08e4ed38699f 100644 --- a/dev-support/docker/Dockerfile +++ b/dev-support/docker/Dockerfile @@ -73,9 +73,9 @@ ENV LANG=en_US.UTF-8 LANGUAGE=en_US:en LC_ALL=en_US.UTF-8 # FROM BASE_IMAGE AS SPOTBUGS_DOWNLOAD_IMAGE -ENV SPOTBUGS_VERSION '4.7.2' +ENV SPOTBUGS_VERSION '4.7.3' ENV SPOTBUGS_URL "https://repo.maven.apache.org/maven2/com/github/spotbugs/spotbugs/${SPOTBUGS_VERSION}/spotbugs-${SPOTBUGS_VERSION}.tgz" -ENV SPOTBUGS_SHA512 '8dd0634c0e5760ade8b2cc3e1d988ed58921dbcbd7568a5a6b4c078d2b2d03c54ebb918329d9832e7e76de23e2e865ff2242cde9ac67d8971f71eea0d43ff0c5' +ENV SPOTBUGS_SHA512 
'09a9fe0e5a6ec8e9d6d116c361b5c34c9d0560c0271241f02fadee911952adfcd69dc184f6de1cc4d4a8fe2c84c162689ea9a691dcae0779935eedf390fcc4ad' SHELL ["/bin/bash", "-o", "pipefail", "-c"] RUN curl --location --fail --silent --show-error --output /tmp/spotbugs.tgz "${SPOTBUGS_URL}" && \ echo "${SPOTBUGS_SHA512} */tmp/spotbugs.tgz" | sha512sum -c - diff --git a/pom.xml b/pom.xml index c9f74cde8b03..f8805524c0b2 100644 --- a/pom.xml +++ b/pom.xml @@ -853,8 +853,8 @@ 3.1.2 1.5.0.Final 1.3.9-1 - 4.7.2 - 4.7.2.0 + 4.7.3 + 4.7.2.1 3.0.0-M6 2.12 1.0.1 From cdabfd3ca811d1c5ef79e9b6bd8106b2d6d12cd5 Mon Sep 17 00:00:00 2001 From: Duo Zhang Date: Fri, 28 Oct 2022 18:41:47 +0800 Subject: [PATCH 5/9] HBASE-27450 Update all our python scripts to use python3 (#4851) Signed-off-by: Guanghao Zhang --- dev-support/Dockerfile | 10 +- dev-support/Jenkinsfile | 12 +- dev-support/Jenkinsfile_GitHub | 8 +- dev-support/checkcompatibility.py | 32 +- dev-support/checkstyle_report.py | 56 ++-- dev-support/create-release/prepend_changes.py | 4 +- .../create-release/prepend_releasenotes.py | 4 +- dev-support/flaky-tests/findHangingTests.py | 38 +-- .../flaky-tests/flaky-reporting.Jenkinsfile | 2 +- dev-support/flaky-tests/report-flakies.py | 28 +- dev-support/gen_redirect_html.py | 8 +- dev-support/submit-patch.py | 311 ------------------ 12 files changed, 101 insertions(+), 412 deletions(-) delete mode 100755 dev-support/submit-patch.py diff --git a/dev-support/Dockerfile b/dev-support/Dockerfile index 923e26563b3a..44822e9fb44b 100644 --- a/dev-support/Dockerfile +++ b/dev-support/Dockerfile @@ -20,16 +20,14 @@ # # Specifically, it's used for the flaky test reporting job defined in # dev-support/flaky-tests/flaky-reporting.Jenkinsfile -FROM ubuntu:18.04 +FROM ubuntu:22.04 COPY . /hbase/dev-support RUN DEBIAN_FRONTEND=noninteractive apt-get -qq -y update \ && DEBIAN_FRONTEND=noninteractive apt-get -qq -y install --no-install-recommends \ - curl='7.58.0-*' \ - python2.7='2.7.17-*' \ - python-pip='9.0.1-*' \ - python-setuptools='39.0.1-*' \ + curl='7.81.0-*' \ + python3-pip='22.0.2+dfsg-*' \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* -RUN pip install -r /hbase/dev-support/python-requirements.txt +RUN pip3 install -r /hbase/dev-support/python-requirements.txt diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile index f287f01f637d..2098fff80085 100644 --- a/dev-support/Jenkinsfile +++ b/dev-support/Jenkinsfile @@ -267,14 +267,14 @@ pipeline { if [ -d "${OUTPUT_DIR}/branch-site" ]; then echo "Remove ${OUTPUT_DIR}/branch-site for saving space" rm -rf "${OUTPUT_DIR}/branch-site" - python2 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_GENERAL_CHECK_BASE}/branch-site" > "${OUTPUT_DIR}/branch-site.html" + python3 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_GENERAL_CHECK_BASE}/branch-site" > "${OUTPUT_DIR}/branch-site.html" else echo "No branch-site, skipping" fi if [ -d "${OUTPUT_DIR}/patch-site" ]; then echo "Remove ${OUTPUT_DIR}/patch-site for saving space" rm -rf "${OUTPUT_DIR}/patch-site" - python2 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_GENERAL_CHECK_BASE}/patch-site" > "${OUTPUT_DIR}/patch-site.html" + python3 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_GENERAL_CHECK_BASE}/patch-site" > "${OUTPUT_DIR}/patch-site.html" else echo "No patch-site, skipping" fi @@ -384,7 +384,7 @@ pipeline { if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space" rm -rf "${OUTPUT_DIR}/test_logs.zip" - python2 
${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}" > "${OUTPUT_DIR}/test_logs.html" + python3 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}" > "${OUTPUT_DIR}/test_logs.html" else echo "No test_logs.zip, skipping" fi @@ -493,7 +493,7 @@ pipeline { if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space" rm -rf "${OUTPUT_DIR}/test_logs.zip" - python2 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}" > "${OUTPUT_DIR}/test_logs.html" + python3 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}" > "${OUTPUT_DIR}/test_logs.html" else echo "No test_logs.zip, skipping" fi @@ -604,7 +604,7 @@ pipeline { if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space" rm -rf "${OUTPUT_DIR}/test_logs.zip" - python2 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}" > "${OUTPUT_DIR}/test_logs.html" + python3 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}" > "${OUTPUT_DIR}/test_logs.html" else echo "No test_logs.zip, skipping" fi @@ -773,7 +773,7 @@ pipeline { if [ -f "${SRC_TAR}" ]; then echo "Remove ${SRC_TAR} for saving space" rm -rf "${SRC_TAR}" - python2 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/output-srctarball" > "${WORKSPACE}/output-srctarball/hbase-src.html" + python3 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/output-srctarball" > "${WORKSPACE}/output-srctarball/hbase-src.html" else echo "No hbase-src.tar.gz, skipping" fi diff --git a/dev-support/Jenkinsfile_GitHub b/dev-support/Jenkinsfile_GitHub index 8f64e51aa04e..c230f7a8e006 100644 --- a/dev-support/Jenkinsfile_GitHub +++ b/dev-support/Jenkinsfile_GitHub @@ -143,14 +143,14 @@ pipeline { if [ -d "${PATCHDIR}/branch-site" ]; then echo "Remove ${PATCHDIR}/branch-site for saving space" rm -rf "${PATCHDIR}/branch-site" - python2 ${SOURCEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_GENERAL_CHECK_BASE}/branch-site" > "${PATCHDIR}/branch-site.html" + python3 ${SOURCEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_GENERAL_CHECK_BASE}/branch-site" > "${PATCHDIR}/branch-site.html" else echo "No branch-site, skipping" fi if [ -d "${PATCHDIR}/patch-site" ]; then echo "Remove ${PATCHDIR}/patch-site for saving space" rm -rf "${PATCHDIR}/patch-site" - python2 ${SOURCEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_GENERAL_CHECK_BASE}/patch-site" > "${PATCHDIR}/patch-site.html" + python3 ${SOURCEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_GENERAL_CHECK_BASE}/patch-site" > "${PATCHDIR}/patch-site.html" else echo "No patch-site, skipping" fi @@ -282,7 +282,7 @@ pipeline { if [ -f "${PATCHDIR}/test_logs.zip" ]; then echo "Remove ${PATCHDIR}/test_logs.zip for saving space" rm -rf "${PATCHDIR}/test_logs.zip" - python2 ${SOURCEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/${WORKDIR_REL}/${PATCH_REL}" > "${PATCHDIR}/test_logs.html" + python3 ${SOURCEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/${WORKDIR_REL}/${PATCH_REL}" > "${PATCHDIR}/test_logs.html" else echo "No test_logs.zip, skipping" fi @@ -414,7 +414,7 @@ pipeline { if [ -f "${PATCHDIR}/test_logs.zip" ]; then echo "Remove ${PATCHDIR}/test_logs.zip for saving space" rm -rf "${PATCHDIR}/test_logs.zip" - python2 ${SOURCEDIR}/dev-support/gen_redirect_html.py 
"${ASF_NIGHTLIES_BASE}/${WORKDIR_REL}/${PATCH_REL}" > "${PATCHDIR}/test_logs.html" + python3 ${SOURCEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/${WORKDIR_REL}/${PATCH_REL}" > "${PATCHDIR}/test_logs.html" else echo "No test_logs.zip, skipping" fi diff --git a/dev-support/checkcompatibility.py b/dev-support/checkcompatibility.py index c6cc3be9ba25..914f8dd42f17 100755 --- a/dev-support/checkcompatibility.py +++ b/dev-support/checkcompatibility.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file @@ -41,7 +41,9 @@ import shutil import subprocess import sys -import urllib2 +import urllib.request +import urllib.error +import urllib.parse from collections import namedtuple try: import argparse @@ -55,11 +57,11 @@ def check_output(*popenargs, **kwargs): - """ Run command with arguments and return its output as a byte string. - Backported from Python 2.7 as it's implemented as pure python on stdlib. - >>> check_output(['/usr/bin/python', '--version']) - Python 2.6.2 """ - process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs) + """ Run command with arguments and return its output as a byte string. """ + process = subprocess.Popen(stdout=subprocess.PIPE, + universal_newlines=True, + *popenargs, + **kwargs) output, _ = process.communicate() retcode = process.poll() if retcode: @@ -69,7 +71,7 @@ def check_output(*popenargs, **kwargs): error = subprocess.CalledProcessError(retcode, cmd) error.output = output raise error - return output + return output.strip() def get_repo_dir(): @@ -161,7 +163,7 @@ def checkout_java_acc(force): url = "https://github.com/lvc/japi-compliance-checker/archive/2.4.tar.gz" scratch_dir = get_scratch_dir() path = os.path.join(scratch_dir, os.path.basename(url)) - jacc = urllib2.urlopen(url) + jacc = urllib.request.urlopen(url) with open(path, 'wb') as w: w.write(jacc.read()) @@ -196,8 +198,8 @@ def ascii_encode_dict(data): """ Iterate through a dictionary of data and convert all unicode to ascii. 
This method was taken from stackoverflow.com/questions/9590382/forcing-python-json-module-to-work-with-ascii """ - ascii_encode = lambda x: x.encode('ascii') if isinstance(x, unicode) else x - return dict(map(ascii_encode, pair) for pair in data.items()) + ascii_encode = lambda x: x.encode('ascii') if isinstance(x, str) else x + return dict(list(map(ascii_encode, pair)) for pair in list(data.items())) def process_json(path): @@ -229,8 +231,8 @@ def compare_results(tool_results, known_issues, compare_warnings): unexpected_issues = [unexpected_issue(check=check, issue_type=issue_type, known_count=known_count, observed_count=tool_results[check][issue_type]) - for check, known_issue_counts in known_issues.items() - for issue_type, known_count in known_issue_counts.items() + for check, known_issue_counts in list(known_issues.items()) + for issue_type, known_count in list(known_issue_counts.items()) if compare_tool_results_count(tool_results, check, issue_type, known_count)] if not compare_warnings: @@ -309,14 +311,14 @@ def run_java_acc(src_name, src_jars, dst_name, dst_jars, annotations, skip_annot logging.info("Annotations are: %s", annotations) annotations_path = os.path.join(get_scratch_dir(), "annotations.txt") logging.info("Annotations path: %s", annotations_path) - with file(annotations_path, "w") as f: + with open(annotations_path, "w") as f: f.write('\n'.join(annotations)) args.extend(["-annotations-list", annotations_path]) if skip_annotations is not None: skip_annotations_path = os.path.join( get_scratch_dir(), "skip_annotations.txt") - with file(skip_annotations_path, "w") as f: + with open(skip_annotations_path, "w") as f: f.write('\n'.join(skip_annotations)) args.extend(["-skip-annotations-list", skip_annotations_path]) diff --git a/dev-support/checkstyle_report.py b/dev-support/checkstyle_report.py index c5810be152b3..99092c23b8ca 100755 --- a/dev-support/checkstyle_report.py +++ b/dev-support/checkstyle_report.py @@ -1,4 +1,4 @@ -#!/usr/bin/python2 +#!/usr/bin/env python3 ## # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. 
See the NOTICE file @@ -29,8 +29,8 @@ from collections import defaultdict if len(sys.argv) != 3 : - print "usage: %s checkstyle-result-master.xml checkstyle-result-patch.xml" % sys.argv[0] - exit(1) + print("usage: %s checkstyle-result-master.xml checkstyle-result-patch.xml" % sys.argv[0]) + sys.exit(1) def path_key(x): path = x.attrib['name'] @@ -40,8 +40,8 @@ def error_name(x): error_class = x.attrib['source'] return error_class[error_class.rfind(".") + 1:] -def print_row(path, error, master_errors, patch_errors): - print '%s\t%s\t%s\t%s' % (path,error, master_errors,patch_errors) +def print_row(path, err, master_errors, patch_errors): + print('%s\t%s\t%s\t%s' % (path, err, master_errors, patch_errors)) master = etree.parse(sys.argv[1]) patch = etree.parse(sys.argv[2]) @@ -49,32 +49,32 @@ def print_row(path, error, master_errors, patch_errors): master_dict = defaultdict(int) ret_value = 0 -for child in master.getroot().getchildren(): +for child in list(master.getroot()): if child.tag != 'file': - continue + continue file = path_key(child) - for error_tag in child.getchildren(): - error = error_name(error_tag) - if (file, error) in master_dict: - master_dict[(file, error)] += 1 - else: - master_dict[(file, error)] = 1 + for error_tag in list(child): + error = error_name(error_tag) + if (file, error) in master_dict: + master_dict[(file, error)] += 1 + else: + master_dict[(file, error)] = 1 -for child in patch.getroot().getchildren(): - if child.tag != 'file': - continue - temp_dict = defaultdict(int) - for error_tag in child.getchildren(): - error = error_name(error_tag) - if error in temp_dict: - temp_dict[error] += 1 - else: - temp_dict[error] = 1 +for child in list(patch.getroot()): + if child.tag != 'file': + continue + temp_dict = defaultdict(int) + for error_tag in list(child): + error = error_name(error_tag) + if error in temp_dict: + temp_dict[error] += 1 + else: + temp_dict[error] = 1 - file = path_key(child) - for error, count in temp_dict.iteritems(): - if count > master_dict[(file, error)]: - print_row(file, error, master_dict[(file, error)], count) - ret_value = 1 + file = path_key(child) + for error, count in temp_dict.items(): + if count > master_dict[(file, error)]: + print_row(file, error, master_dict[(file, error)], count) + ret_value = 1 sys.exit(ret_value) diff --git a/dev-support/create-release/prepend_changes.py b/dev-support/create-release/prepend_changes.py index 7076cb91aed7..bba51594809f 100755 --- a/dev-support/create-release/prepend_changes.py +++ b/dev-support/create-release/prepend_changes.py @@ -1,4 +1,4 @@ -#!/usr/bin/python3 +#!/usr/bin/env python3 ## # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file @@ -22,7 +22,7 @@ if len(sys.argv) != 3: print("usage: %s " % sys.argv[0]) - exit(1) + sys.exit(1) pattern = re.compile(r'^## Release .+ - Unreleased .+$') with open(sys.argv[1], 'r', errors = 'ignore') as new_r, open(sys.argv[2], 'r', errors = 'ignore') as prev_r, open(sys.argv[2] + '.tmp', 'w') as w: diff --git a/dev-support/create-release/prepend_releasenotes.py b/dev-support/create-release/prepend_releasenotes.py index 1dac1346d723..b5bbec1274e2 100755 --- a/dev-support/create-release/prepend_releasenotes.py +++ b/dev-support/create-release/prepend_releasenotes.py @@ -1,4 +1,4 @@ -#!/usr/bin/python3 +#!/usr/bin/env python3 ## # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. 
See the NOTICE file @@ -22,7 +22,7 @@ if len(sys.argv) != 3: print("usage: %s " % sys.argv[0]) - exit(1) + sys.exit(1) pattern = re.compile(r'^# .+ Release Notes$') with open(sys.argv[1], 'r', errors = 'ignore') as new_r, open(sys.argv[2], 'r', errors = 'ignore') as prev_r, open(sys.argv[2] + '.tmp', 'w') as w: diff --git a/dev-support/flaky-tests/findHangingTests.py b/dev-support/flaky-tests/findHangingTests.py index f8416e0a1e11..bb0a9a5be619 100755 --- a/dev-support/flaky-tests/findHangingTests.py +++ b/dev-support/flaky-tests/findHangingTests.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 ## # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file @@ -45,8 +45,8 @@ def get_bad_tests(console_url): """ response = requests.get(console_url) if response.status_code != 200: - print "Error getting consoleText. Response = {} {}".format( - response.status_code, response.reason) + print("Error getting consoleText. Response = {} {}".format( + response.status_code, response.reason)) return # All tests: All testcases which were run. @@ -59,13 +59,13 @@ def get_bad_tests(console_url): hanging_tests_set = set() failed_tests_set = set() timeout_tests_set = set() - for line in response.content.splitlines(): + for line in response.content.decode("utf-8").splitlines(): result1 = re.findall("Running org.apache.hadoop.hbase.(.*)", line) if len(result1) == 1: test_case = result1[0] if test_case in all_tests_set: - print ("ERROR! Multiple tests with same name '{}'. Might get wrong results " - "for this test.".format(test_case)) + print(("ERROR! Multiple tests with same name '{}'. Might get wrong results " + "for this test.".format(test_case))) else: hanging_tests_set.add(test_case) all_tests_set.add(test_case) @@ -75,9 +75,9 @@ def get_bad_tests(console_url): if "FAILURE!" in line: failed_tests_set.add(test_case) if test_case not in hanging_tests_set: - print ("ERROR! No test '{}' found in hanging_tests. Might get wrong results " + print(("ERROR! No test '{}' found in hanging_tests. Might get wrong results " "for this test. This may also happen if maven is set to retry failing " - "tests.".format(test_case)) + "tests.".format(test_case))) else: hanging_tests_set.remove(test_case) result3 = re.match("^\\s+(\\w*).*\\sTestTimedOut", line) @@ -86,30 +86,30 @@ def get_bad_tests(console_url): timeout_tests_set.add(test_case) for bad_string in BAD_RUN_STRINGS: if re.match(".*" + bad_string + ".*", line): - print "Bad string found in build:\n > {}".format(line) - print "Result > total tests: {:4} failed : {:4} timedout : {:4} hanging : {:4}".format( - len(all_tests_set), len(failed_tests_set), len(timeout_tests_set), len(hanging_tests_set)) + print("Bad string found in build:\n > {}".format(line)) + print("Result > total tests: {:4} failed : {:4} timedout : {:4} hanging : {:4}".format( + len(all_tests_set), len(failed_tests_set), len(timeout_tests_set), len(hanging_tests_set))) return [all_tests_set, failed_tests_set, timeout_tests_set, hanging_tests_set] if __name__ == "__main__": if len(sys.argv) != 2: - print "ERROR : Provide the jenkins job console URL as the only argument." 
+ print("ERROR : Provide the jenkins job console URL as the only argument.") sys.exit(1) - print "Fetching {}".format(sys.argv[1]) + print("Fetching {}".format(sys.argv[1])) result = get_bad_tests(sys.argv[1]) if not result: sys.exit(1) [all_tests, failed_tests, timedout_tests, hanging_tests] = result - print "Found {} hanging tests:".format(len(hanging_tests)) + print("Found {} hanging tests:".format(len(hanging_tests))) for test in hanging_tests: - print test - print "\n" - print "Found {} failed tests of which {} timed out:".format( - len(failed_tests), len(timedout_tests)) + print(test) + print("\n") + print("Found {} failed tests of which {} timed out:".format( + len(failed_tests), len(timedout_tests))) for test in failed_tests: - print "{0} {1}".format(test, ("(Timed Out)" if test in timedout_tests else "")) + print("{0} {1}".format(test, ("(Timed Out)" if test in timedout_tests else ""))) print ("\nA test may have had 0 or more atomic test failures before it timed out. So a " "'Timed Out' test may have other errors too.") diff --git a/dev-support/flaky-tests/flaky-reporting.Jenkinsfile b/dev-support/flaky-tests/flaky-reporting.Jenkinsfile index b37045b3a6c2..f50900954532 100644 --- a/dev-support/flaky-tests/flaky-reporting.Jenkinsfile +++ b/dev-support/flaky-tests/flaky-reporting.Jenkinsfile @@ -47,7 +47,7 @@ pipeline { flaky_args=("${flaky_args[@]}" --urls "${JENKINS_URL}/job/HBase-Flaky-Tests/job/${BRANCH_NAME}" --is-yetus False --max-builds 50) docker build -t hbase-dev-support dev-support docker run --ulimit nproc=12500 -v "${WORKSPACE}":/hbase -u `id -u`:`id -g` --workdir=/hbase hbase-dev-support \ - python dev-support/flaky-tests/report-flakies.py --mvn -v -o output "${flaky_args[@]}" + ./dev-support/flaky-tests/report-flakies.py --mvn -v -o output "${flaky_args[@]}" ''' sshPublisher(publishers: [ sshPublisherDesc(configName: 'Nightlies', diff --git a/dev-support/flaky-tests/report-flakies.py b/dev-support/flaky-tests/report-flakies.py index e2d30b0d6dc3..16096e3344a5 100755 --- a/dev-support/flaky-tests/report-flakies.py +++ b/dev-support/flaky-tests/report-flakies.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 ## # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file @@ -140,7 +140,7 @@ def expand_multi_config_projects(cli_args): raise Exception("Failed to get job information from jenkins for url '" + job_url + "'. Jenkins returned HTTP status " + str(request.status_code)) response = request.json() - if response.has_key("activeConfigurations"): + if "activeConfigurations" in response: for config in response["activeConfigurations"]: final_expanded_urls.append({'url':config["url"], 'max_builds': max_builds, 'excludes': excluded_builds, 'is_yetus': is_yetus}) @@ -167,7 +167,7 @@ def expand_multi_config_projects(cli_args): url = url_max_build["url"] excludes = url_max_build["excludes"] json_response = requests.get(url + "/api/json?tree=id,builds%5Bnumber,url%5D").json() - if json_response.has_key("builds"): + if "builds" in json_response: builds = json_response["builds"] logger.info("Analyzing job: %s", url) else: @@ -238,27 +238,27 @@ def expand_multi_config_projects(cli_args): # Sort tests in descending order by flakyness. 
sorted_test_to_build_ids = OrderedDict( - sorted(test_to_build_ids.iteritems(), key=lambda x: x[1]['flakyness'], reverse=True)) + sorted(iter(test_to_build_ids.items()), key=lambda x: x[1]['flakyness'], reverse=True)) url_to_bad_test_results[url] = sorted_test_to_build_ids if len(sorted_test_to_build_ids) > 0: - print "URL: {}".format(url) - print "{:>60} {:10} {:25} {}".format( - "Test Name", "Total Runs", "Bad Runs(failed/timeout/hanging)", "Flakyness") + print("URL: {}".format(url)) + print("{:>60} {:10} {:25} {}".format( + "Test Name", "Total Runs", "Bad Runs(failed/timeout/hanging)", "Flakyness")) for bad_test in sorted_test_to_build_ids: test_status = sorted_test_to_build_ids[bad_test] - print "{:>60} {:10} {:7} ( {:4} / {:5} / {:5} ) {:2.0f}%".format( + print("{:>60} {:10} {:7} ( {:4} / {:5} / {:5} ) {:2.0f}%".format( bad_test, len(test_status['all']), test_status['bad_count'], len(test_status['failed']), len(test_status['timeout']), - len(test_status['hanging']), test_status['flakyness']) + len(test_status['hanging']), test_status['flakyness'])) else: - print "No flaky tests founds." + print("No flaky tests founds.") if len(url_to_build_ids[url]) == len(build_ids_without_tests_run): - print "None of the analyzed builds have test result." + print("None of the analyzed builds have test result.") - print "Builds analyzed: {}".format(url_to_build_ids[url]) - print "Builds without any test runs: {}".format(build_ids_without_tests_run) - print "" + print("Builds analyzed: {}".format(url_to_build_ids[url])) + print("Builds without any test runs: {}".format(build_ids_without_tests_run)) + print("") all_bad_tests = all_hanging_tests.union(all_failed_tests) diff --git a/dev-support/gen_redirect_html.py b/dev-support/gen_redirect_html.py index 8a24a81ac65a..2689fd8aa4a3 100755 --- a/dev-support/gen_redirect_html.py +++ b/dev-support/gen_redirect_html.py @@ -1,4 +1,4 @@ -#!/usr/bin/python2 +#!/usr/bin/env python3 ## # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file @@ -20,8 +20,8 @@ from string import Template if len(sys.argv) != 2 : - print "usage: %s " % sys.argv[0] - exit(1) + print("usage: %s " % sys.argv[0]) + sys.exit(1) url = sys.argv[1].replace(" ", "%20") template = Template(""" @@ -34,4 +34,4 @@ """) output = template.substitute(url = url) -print output +print(output) diff --git a/dev-support/submit-patch.py b/dev-support/submit-patch.py deleted file mode 100755 index b4591abf765f..000000000000 --- a/dev-support/submit-patch.py +++ /dev/null @@ -1,311 +0,0 @@ -#!/usr/bin/env python2 -## -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Makes a patch for the current branch, creates/updates the review board request and uploads new -# patch to jira. 
Patch is named as (JIRA).(branch name).(patch number).patch as per Yetus' naming -# rules. If no jira is specified, patch will be named (branch name).patch and jira and review board -# are not updated. Review board id is retrieved from the remote link in the jira. -# Print help: submit-patch.py --h -import argparse -from builtins import input, str -import getpass -import git -import json -import logging -import os -import re -import requests -import subprocess -import sys - -parser = argparse.ArgumentParser( - epilog = "To avoid having to enter jira/review board username/password every time, setup an " - "encrypted ~/.apache-cred files as follows:\n" - "1) Create a file with following single " - "line: \n{\"jira_username\" : \"appy\", \"jira_password\":\"123\", " - "\"rb_username\":\"appy\", \"rb_password\" : \"@#$\"}\n" - "2) Encrypt it with openssl.\n" - "openssl enc -aes-256-cbc -in -out ~/.apache-creds\n" - "3) Delete original file.\n" - "Now onwards, you'll need to enter this encryption key only once per run. If you " - "forget the key, simply regenerate ~/.apache-cred file again.", - formatter_class=argparse.RawTextHelpFormatter -) -parser.add_argument("-b", "--branch", - help = "Branch to use for generating diff. If not specified, tracking branch " - "is used. If there is no tracking branch, error will be thrown.") - -# Arguments related to Jira. -parser.add_argument("-jid", "--jira-id", - help = "Jira id of the issue. If set, we deduce next patch version from " - "attachments in the jira and also upload the new patch. Script will " - "ask for jira username/password for authentication. If not set, " - "patch is named .patch.") - -# Arguments related to Review Board. -parser.add_argument("-srb", "--skip-review-board", - help = "Don't create/update the review board.", - default = False, action = "store_true") -parser.add_argument("--reviewers", - help = "Comma separated list of users to add as reviewers.") - -# Misc arguments -parser.add_argument("--patch-dir", default = "~/patches", - help = "Directory to store patch files. If it doesn't exist, it will be " - "created. Default: ~/patches") -parser.add_argument("--rb-repo", default = "hbase-git", - help = "Review board repository. Default: hbase-git") -args = parser.parse_args() - -# Setup logger -logging.basicConfig() -logger = logging.getLogger("submit-patch") -logger.setLevel(logging.INFO) - - -def log_fatal_and_exit(*arg): - logger.fatal(*arg) - sys.exit(1) - - -def assert_status_code(response, expected_status_code, description): - if response.status_code != expected_status_code: - log_fatal_and_exit(" Oops, something went wrong when %s. \nResponse: %s %s\nExiting..", - description, response.status_code, response.reason) - - -# Make repo instance to interact with git repo. -try: - repo = git.Repo(os.getcwd()) - git = repo.git -except git.exc.InvalidGitRepositoryError as e: - log_fatal_and_exit(" '%s' is not valid git repo directory.\nRun from base directory of " - "HBase's git repo.", e) - -logger.info(" Active branch: %s", repo.active_branch.name) -# Do not proceed if there are uncommitted changes. -if repo.is_dirty(): - log_fatal_and_exit(" Git status is dirty. Commit locally first.") - - -# Returns base branch for creating diff. -def get_base_branch(): - # if --branch is set, use it as base branch for computing diff. Also check that it's a valid branch. - if args.branch is not None: - base_branch = args.branch - # Check that given branch exists. 
- for ref in repo.refs: - if ref.name == base_branch: - return base_branch - log_fatal_and_exit(" Branch '%s' does not exist in refs.", base_branch) - else: - # if --branch is not set, use tracking branch as base branch for computing diff. - # If there is no tracking branch, log error and quit. - tracking_branch = repo.active_branch.tracking_branch() - if tracking_branch is None: - log_fatal_and_exit(" Active branch doesn't have a tracking_branch. Please specify base " - " branch for computing diff using --branch flag.") - logger.info(" Using tracking branch as base branch") - return tracking_branch.name - - -# Returns patch name having format (JIRA).(branch name).(patch number).patch. If no jira is -# specified, patch is name (branch name).patch. -def get_patch_name(branch): - if args.jira_id is None: - return branch + ".patch" - - patch_name_prefix = args.jira_id.upper() + "." + branch - return get_patch_name_with_version(patch_name_prefix) - - -# Fetches list of attachments from the jira, deduces next version for the patch and returns final -# patch name. -def get_patch_name_with_version(patch_name_prefix): - # JIRA's rest api is broken wrt to attachments. https://jira.atlassian.com/browse/JRA-27637. - # Using crude way to get list of attachments. - url = "https://issues.apache.org/jira/browse/" + args.jira_id - logger.info("Getting list of attachments for jira %s from %s", args.jira_id, url) - html = requests.get(url) - if html.status_code == 404: - log_fatal_and_exit(" Invalid jira id : %s", args.jira_id) - if html.status_code != 200: - log_fatal_and_exit(" Cannot fetch jira information. Status code %s", html.status_code) - # Iterate over patch names starting from version 1 and return when name is not already used. - content = str(html.content, 'utf-8') - for i in range(1, 1000): - name = patch_name_prefix + "." + ('{0:03d}'.format(i)) + ".patch" - if name not in content: - return name - - -# Validates that patch directory exists, if not, creates it. -def validate_patch_dir(patch_dir): - # Create patch_dir if it doesn't exist. - if not os.path.exists(patch_dir): - logger.warn(" Patch directory doesn't exist. Creating it.") - os.mkdir(patch_dir) - else: - # If patch_dir exists, make sure it's a directory. - if not os.path.isdir(patch_dir): - log_fatal_and_exit(" '%s' exists but is not a directory. Specify another directory.", - patch_dir) - - -# Make sure current branch is ahead of base_branch by exactly 1 commit. Quits if -# - base_branch has commits not in current branch -# - current branch is same as base branch -# - current branch is ahead of base_branch by more than 1 commits -def check_diff_between_branches(base_branch): - only_in_base_branch = list(repo.iter_commits("HEAD.." + base_branch)) - only_in_active_branch = list(repo.iter_commits(base_branch + "..HEAD")) - if len(only_in_base_branch) != 0: - log_fatal_and_exit(" '%s' is ahead of current branch by %s commits. Rebase " - "and try again.", base_branch, len(only_in_base_branch)) - if len(only_in_active_branch) == 0: - log_fatal_and_exit(" Current branch is same as '%s'. Exiting...", base_branch) - if len(only_in_active_branch) > 1: - log_fatal_and_exit(" Current branch is ahead of '%s' by %s commits. Squash into single " - "commit and try again.", base_branch, len(only_in_active_branch)) - - -# If ~/.apache-creds is present, load credentials from it otherwise prompt user. 
-def get_credentials(): - creds = dict() - creds_filepath = os.path.expanduser("~/.apache-creds") - if os.path.exists(creds_filepath): - try: - logger.info(" Reading ~/.apache-creds for Jira and ReviewBoard credentials") - content = subprocess.check_output("openssl enc -aes-256-cbc -d -in " + creds_filepath, - shell=True) - except subprocess.CalledProcessError as e: - log_fatal_and_exit(" Couldn't decrypt ~/.apache-creds file. Exiting..") - creds = json.loads(content) - else: - creds['jira_username'] = input("Jira username:") - creds['jira_password'] = getpass.getpass("Jira password:") - if not args.skip_review_board: - creds['rb_username'] = input("Review Board username:") - creds['rb_password'] = getpass.getpass("Review Board password:") - return creds - - -def attach_patch_to_jira(issue_url, patch_filepath, patch_filename, creds): - # Upload patch to jira using REST API. - headers = {'X-Atlassian-Token': 'no-check'} - files = {'file': (patch_filename, open(patch_filepath, 'rb'), 'text/plain')} - jira_auth = requests.auth.HTTPBasicAuth(creds['jira_username'], creds['jira_password']) - attachment_url = issue_url + "/attachments" - r = requests.post(attachment_url, headers = headers, files = files, auth = jira_auth) - assert_status_code(r, 200, "uploading patch to jira") - - -def get_jira_summary(issue_url): - r = requests.get(issue_url + "?fields=summary") - assert_status_code(r, 200, "fetching jira summary") - return json.loads(r.content)["fields"]["summary"] - - -def get_review_board_id_if_present(issue_url, rb_link_title): - r = requests.get(issue_url + "/remotelink") - assert_status_code(r, 200, "fetching remote links") - links = json.loads(r.content) - for link in links: - if link["object"]["title"] == rb_link_title: - res = re.search("reviews.apache.org/r/([0-9]+)", link["object"]["url"]) - return res.group(1) - return None - - -base_branch = get_base_branch() -# Remove remote repo name from branch name if present. This assumes that we don't use '/' in -# actual branch names. -base_branch_without_remote = base_branch.split('/')[-1] -logger.info(" Base branch: %s", base_branch) - -check_diff_between_branches(base_branch) - -patch_dir = os.path.abspath(os.path.expanduser(args.patch_dir)) -logger.info(" Patch directory: %s", patch_dir) -validate_patch_dir(patch_dir) - -patch_filename = get_patch_name(base_branch_without_remote) -logger.info(" Patch name: %s", patch_filename) -patch_filepath = os.path.join(patch_dir, patch_filename) - -diff = git.format_patch(base_branch, stdout = True) -with open(patch_filepath, "wb") as f: - f.write(diff.encode('utf8')) - -if args.jira_id is not None: - creds = get_credentials() - issue_url = "https://issues.apache.org/jira/rest/api/2/issue/" + args.jira_id - - attach_patch_to_jira(issue_url, patch_filepath, patch_filename, creds) - - if not args.skip_review_board: - rb_auth = requests.auth.HTTPBasicAuth(creds['rb_username'], creds['rb_password']) - - rb_link_title = "Review Board (" + base_branch_without_remote + ")" - rb_id = get_review_board_id_if_present(issue_url, rb_link_title) - - # If no review board link found, create new review request and add its link to jira. 
- if rb_id is None: - reviews_url = "https://reviews.apache.org/api/review-requests/" - data = {"repository" : "hbase-git"} - r = requests.post(reviews_url, data = data, auth = rb_auth) - assert_status_code(r, 201, "creating new review request") - review_request = json.loads(r.content)["review_request"] - absolute_url = review_request["absolute_url"] - logger.info(" Created new review request: %s", absolute_url) - - # Use jira summary as review's summary too. - summary = get_jira_summary(issue_url) - # Use commit message as description. - description = repo.head.commit.message - update_draft_data = {"bugs_closed" : [args.jira_id.upper()], "target_groups" : "hbase", - "target_people" : args.reviewers, "summary" : summary, - "description" : description } - draft_url = review_request["links"]["draft"]["href"] - r = requests.put(draft_url, data = update_draft_data, auth = rb_auth) - assert_status_code(r, 200, "updating review draft") - - draft_request = json.loads(r.content)["draft"] - diff_url = draft_request["links"]["draft_diffs"]["href"] - files = {'path' : (patch_filename, open(patch_filepath, 'rb'))} - r = requests.post(diff_url, files = files, auth = rb_auth) - assert_status_code(r, 201, "uploading diff to review draft") - - r = requests.put(draft_url, data = {"public" : True}, auth = rb_auth) - assert_status_code(r, 200, "publishing review request") - - # Add link to review board in the jira. - remote_link = json.dumps({'object': {'url': absolute_url, 'title': rb_link_title}}) - jira_auth = requests.auth.HTTPBasicAuth(creds['jira_username'], creds['jira_password']) - r = requests.post(issue_url + "/remotelink", data = remote_link, auth = jira_auth, - headers={'Content-Type':'application/json'}) - else: - logger.info(" Updating existing review board: https://reviews.apache.org/r/%s", rb_id) - draft_url = "https://reviews.apache.org/api/review-requests/" + rb_id + "/draft/" - diff_url = draft_url + "diffs/" - files = {'path' : (patch_filename, open(patch_filepath, 'rb'))} - r = requests.post(diff_url, files = files, auth = rb_auth) - assert_status_code(r, 201, "uploading diff to review draft") - - r = requests.put(draft_url, data = {"public" : True}, auth = rb_auth) - assert_status_code(r, 200, "publishing review request") From cfe5e64f7db2fbd7efd06d44d5bc1467bc10e5a5 Mon Sep 17 00:00:00 2001 From: Andrew Purtell Date: Fri, 28 Oct 2022 17:20:14 -0700 Subject: [PATCH 6/9] HBASE-27453 Update downloads.xml for release 2.4.15 --- src/site/xdoc/downloads.xml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/src/site/xdoc/downloads.xml b/src/site/xdoc/downloads.xml index 170a79648d30..e95fce46354f 100644 --- a/src/site/xdoc/downloads.xml +++ b/src/site/xdoc/downloads.xml @@ -93,24 +93,24 @@ under the License. - 2.4.14 + 2.4.15 - 2022/08/29 + 2022/10/28 - 2.4.14 vs 2.4.13 + 2.4.15 vs 2.4.14 - Changes + Changes - Release Notes + Release Notes - src (sha512 asc)
- bin (sha512 asc)
- client-bin (sha512 asc)
 stable release
+ src (sha512 asc)
+ bin (sha512 asc)
+ client-bin (sha512 asc)

From f09ac40105c38be082abc22977ccfc5db8e83fc5 Mon Sep 17 00:00:00 2001
From: Andrew Purtell
Date: Fri, 28 Oct 2022 17:20:40 -0700
Subject: [PATCH 7/9] HBASE-27454 Update downloads.xml for release 2.5.1

---
 src/site/xdoc/downloads.xml | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/src/site/xdoc/downloads.xml b/src/site/xdoc/downloads.xml
index e95fce46354f..e351d216e23c 100644
--- a/src/site/xdoc/downloads.xml
+++ b/src/site/xdoc/downloads.xml
@@ -70,24 +70,24 @@ under the License.
- 2.5.0
+ 2.5.1
- 2022/08/31
+ 2022/10/28
- 2.4.0 vs 2.5.0
+ 2.5.1 vs 2.5.0
- Changes
+ Changes
- Release Notes
+ Release Notes
- src (sha512 asc)
- bin (sha512 asc)
- client-bin (sha512 asc)
+ src (sha512 asc)
+ bin (sha512 asc)
+ client-bin (sha512 asc) From 984d226010558c5b5e54d283b465a645816ffb24 Mon Sep 17 00:00:00 2001 From: Duo Zhang Date: Sat, 29 Oct 2022 20:56:42 +0800 Subject: [PATCH 8/9] HBASE-27449 Use xmllint to parse version and revision in release scripts (#4850) Also upgrade our Dockerfile for release scripts to use ubuntu 22.04, maven 3.8.6 and yetus 0.14.1. Use python3 only. Signed-off-by: Guanghao Zhang Signed-off-by: Nick Dimiduk --- .../create-release/hbase-rm/Dockerfile | 52 +++++++------- dev-support/create-release/prepend_changes.py | 2 +- dev-support/create-release/release-build.sh | 2 +- dev-support/create-release/release-util.sh | 67 +++++++++++-------- 4 files changed, 69 insertions(+), 54 deletions(-) diff --git a/dev-support/create-release/hbase-rm/Dockerfile b/dev-support/create-release/hbase-rm/Dockerfile index c43976f61dd1..10cd1a8cb78a 100644 --- a/dev-support/create-release/hbase-rm/Dockerfile +++ b/dev-support/create-release/hbase-rm/Dockerfile @@ -15,11 +15,11 @@ # limitations under the License. # -# Image for building HBase releases. Based on Ubuntu 18.04. +# Image for building HBase releases. Based on Ubuntu 22.04. # # Includes: # * Java 8 -FROM ubuntu:18.04 +FROM ubuntu:22.04 # Install extra needed repos and refresh. @@ -28,40 +28,46 @@ FROM ubuntu:18.04 # the most current package versions (instead of potentially using old versions cached by docker). RUN DEBIAN_FRONTEND=noninteractive apt-get -qq -y update \ && DEBIAN_FRONTEND=noninteractive apt-get -qq -y install --no-install-recommends \ - curl='7.58.0-*' \ - git='1:2.17.1-*' \ - gnupg='2.2.4-*' \ - libcurl4-openssl-dev='7.58.0-*' \ - libxml2-dev='2.9.4+dfsg1-*' \ - lsof='4.89+dfsg-*' \ + curl='7.81.0-*' \ + git='1:2.34.1-*' \ + gnupg='2.2.27-*' \ + libcurl4-openssl-dev='7.81.0-*' \ + libxml2-dev='2.9.13+dfsg-*' \ + libxml2-utils='2.9.13+dfsg-*' \ + lsof='4.93.2+dfsg-*' \ openjdk-8-jdk='8u*' \ - python-pip='9.0.1-*' \ - subversion='1.9.7-*' \ - wget='1.19.4-*' \ + python3-pip='22.0.2+dfsg-*' \ + subversion='1.14.1-*' \ + wget='1.21.2-*' \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* \ && update-alternatives --set java /usr/lib/jvm/java-8-openjdk-amd64/jre/bin/java \ - && pip install \ - python-dateutil==2.8.1 + && pip3 install \ + python-dateutil==2.8.2 SHELL ["/bin/bash", "-o", "pipefail", "-c"] -# Install mvn 3.6.3. 
-ARG MAVEN_VERSION=3.6.3 -ARG SHA=c35a1803a6e70a126e80b2b3ae33eed961f83ed74d18fcd16909b2d44d7dada3203f1ffe726c17ef8dcca2dcaa9fca676987befeadc9b9f759967a8cb77181c0 -ARG BASE_URL=https://apache.osuosl.org/maven/maven-3/${MAVEN_VERSION}/binaries +# Install maven +ENV MAVEN_VERSION=3.8.6 +ARG MAVEN_URL="https://archive.apache.org/dist/maven/maven-3/${MAVEN_VERSION}/binaries/apache-maven-${MAVEN_VERSION}-bin.tar.gz" +ARG MAVEN_SHA512="f790857f3b1f90ae8d16281f902c689e4f136ebe584aba45e4b1fa66c80cba826d3e0e52fdd04ed44b4c66f6d3fe3584a057c26dfcac544a60b301e6d0f91c26" RUN mkdir -p /opt/maven \ - && curl -fsSL -o /tmp/apache-maven.tar.gz ${BASE_URL}/apache-maven-${MAVEN_VERSION}-bin.tar.gz \ - && echo "${SHA} /tmp/apache-maven.tar.gz" | sha512sum -c - \ + && curl -fsSL -o /tmp/apache-maven.tar.gz "${MAVEN_URL}" \ + && echo "${MAVEN_SHA512} /tmp/apache-maven.tar.gz" | sha512sum -c - \ && tar -xzf /tmp/apache-maven.tar.gz -C /opt/maven --strip-components=1 \ && rm -f /tmp/apache-maven.tar.gz \ && ln -s /opt/maven/bin/mvn /usr/bin/mvn # Install Apache Yetus -ENV YETUS_VERSION 0.12.0 -RUN curl -L "https://www.apache.org/dyn/mirrors/mirrors.cgi?action=download&filename=/yetus/${YETUS_VERSION}/apache-yetus-${YETUS_VERSION}-bin.tar.gz" | \ - tar xvz -C /opt -ENV YETUS_HOME /opt/apache-yetus-${YETUS_VERSION} +ENV YETUS_VERSION 0.14.1 +ARG YETUS_URL="https://archive.apache.org/dist/yetus/${YETUS_VERSION}/apache-yetus-${YETUS_VERSION}-bin.tar.gz" +ARG YETUS_SHA512="068c8426a5c7ecc9281b988123d223a3d772fd725365303bde8287ae1515e623764b89f09a4a9cdc01e7f30631814086b71408b625025a51bdb3c58b213b4109" +RUN mkdir -p /opt/yetus \ + && curl -fsSL -o /tmp/apache-yetus.tar.gz "${YETUS_URL}" \ + && echo "${YETUS_SHA512} */tmp/apache-yetus.tar.gz" | sha512sum -c - \ + && tar -xzf /tmp/apache-yetus.tar.gz -C /opt/yetus --strip-components=1 \ + && rm -f /tmp/apache-yetus.tar.gz +ENV YETUS_HOME /opt/yetus ARG UID ARG RM_USER diff --git a/dev-support/create-release/prepend_changes.py b/dev-support/create-release/prepend_changes.py index bba51594809f..7e9dfbb4e51f 100755 --- a/dev-support/create-release/prepend_changes.py +++ b/dev-support/create-release/prepend_changes.py @@ -24,7 +24,7 @@ print("usage: %s " % sys.argv[0]) sys.exit(1) -pattern = re.compile(r'^## Release .+ - Unreleased .+$') +pattern = re.compile(r'^## Release .+$') with open(sys.argv[1], 'r', errors = 'ignore') as new_r, open(sys.argv[2], 'r', errors = 'ignore') as prev_r, open(sys.argv[2] + '.tmp', 'w') as w: line = prev_r.readline() while line: diff --git a/dev-support/create-release/release-build.sh b/dev-support/create-release/release-build.sh index d471aa931e72..fa9844a2df08 100755 --- a/dev-support/create-release/release-build.sh +++ b/dev-support/create-release/release-build.sh @@ -149,7 +149,7 @@ if [[ "$1" == "tag" ]]; then # check their position when generating release data. 
We can not put them under the source code # directory because for 3.x+, CHANGES.md and RELEASENOTES.md are not tracked so later when # generating src release tarball, we will reset the git repo - if [[ $(is_tracked "CHANGES.md") == 0 ]]; then + if is_tracked "CHANGES.md"; then git add RELEASENOTES.md CHANGES.md git commit -s -m "Preparing ${PROJECT} release $RELEASE_TAG; tagging and updates to CHANGES.md and RELEASENOTES.md" cp CHANGES.md ../ diff --git a/dev-support/create-release/release-util.sh b/dev-support/create-release/release-util.sh index fdb078810cef..64b481ac3c29 100755 --- a/dev-support/create-release/release-util.sh +++ b/dev-support/create-release/release-util.sh @@ -60,8 +60,11 @@ function read_config { } function parse_version { - grep -e '.*' | \ - head -n 2 | tail -n 1 | cut -d'>' -f2 | cut -d '<' -f1 + xmllint --xpath "//*[local-name()='project']/*[local-name()='version']/text()" - +} + +function parse_revision { + xmllint --xpath "//*[local-name()='project']/*[local-name()='properties']/*[local-name()='revision']/text()" - } function banner { @@ -135,6 +138,8 @@ function get_api_diff_version { # Get all branches that begin with 'branch-', the hbase convention for # release branches, sort them and then pop off the most recent. function get_release_info { + init_xmllint + PROJECT="$(read_config "PROJECT" "$PROJECT")" export PROJECT @@ -164,6 +169,14 @@ function get_release_info { local version version="$(curl -s "$ASF_REPO_WEBUI;a=blob_plain;f=pom.xml;hb=refs/heads/$GIT_BRANCH" | parse_version)" + # We do not want to expand ${revision} here, see https://maven.apache.org/maven-ci-friendly.html + # If we use ${revision} as placeholder, we need to parse the revision property to + # get maven version + # shellcheck disable=SC2016 + if [[ "${version}" == '${revision}' ]]; then + version="$(curl -s "$ASF_REPO_WEBUI;a=blob_plain;f=pom.xml;hb=refs/heads/$GIT_BRANCH" | + parse_revision)" + fi log "Current branch VERSION is $version." NEXT_VERSION="$version" @@ -344,6 +357,17 @@ function init_locale { export LANG="$locale_value" } +# Check whether xmllint is available +function init_xmllint { + if ! [ -x "$(command -v xmllint)" ]; then + log "Error: xmllint is not available, we need to use it for parsing pom.xml." >&2 + log "Ubuntu: apt install libxml2-utils" >&2 + log "CentOS: yum install xmlstarlet" >&2 + log "Mac OS: brew install xmlstarlet" >&2 + exit 1 + fi +} + # Initializes JAVA_VERSION to the version of the JVM in use. function init_java { if [ -z "$JAVA_HOME" ]; then @@ -355,10 +379,10 @@ function init_java { } function init_python { - if ! [ -x "$(command -v python2)" ]; then - error 'python2 needed by yetus. Install or add link? E.g: sudo ln -sf /usr/bin/python2.7 /usr/local/bin/python2' + if ! [ -x "$(command -v python3)" ]; then + error 'python3 needed by yetus and api report. Install or add link?' fi - log "python version: $(python2 --version)" + log "python3 version: $(python3 --version)" } # Set MVN @@ -440,8 +464,8 @@ function git_clone_overwrite { log "Clone will be of the gitbox repo for ${PROJECT}." if [ -n "${ASF_USERNAME}" ] && [ -n "${ASF_PASSWORD}" ]; then # Ugly! 
- encoded_username=$(python -c "import urllib; print urllib.quote('''$ASF_USERNAME''', '')") - encoded_password=$(python -c "import urllib; print urllib.quote('''$ASF_PASSWORD''', '')") + encoded_username=$(python3 -c "from urllib.parse import quote; print(quote('''$ASF_USERNAME''', ''))") + encoded_password=$(python3 -c "from urllib.parse import quote; print(quote('''$ASF_PASSWORD''', ''))") GIT_REPO="https://$encoded_username:$encoded_password@${asf_repo}" else GIT_REPO="https://${asf_repo}" @@ -533,7 +557,7 @@ function get_jira_name { # Update the CHANGES.md # DOES NOT DO COMMITS! Caller should do that. # requires yetus to have a defined home already. -# yetus requires python2 to be on the path. +# yetus requires python3 to be on the path. function update_releasenotes { local project_dir="$1" local jira_fix_version="$2" @@ -686,28 +710,13 @@ function kick_gpg_agent { function maven_set_version { #input: local this_version="$1" local use_revision='false' - local -a version_splits=() - IFS='.' read -ar version_splits <<< "$(maven_get_version)" - - # Do the right thing based on project and release line. - if [ "${PROJECT}" = 'hbase' ] ; then - if [ "${version_splits[0]}" -le 1 ] ; then - use_revision='false' - elif [ "${version_splits[0]}" -eq 2 ] && [ "${version_splits[1]}" -le 4 ] ; then - use_revision='false' - elif [ "${version_splits[0]}" -eq 2 ] && [ "${version_splits[1]}" -ge 5 ] ; then - use_revision='true' - elif [ "${version_splits[0]}" -ge 3 ] ; then - use_revision='true' - fi - elif [ "${PROJECT}" = 'hbase-thirdparty' ] ; then - use_revision='false' - elif [ "${PROJECT}" = 'hbase-connectors' ] ; then + local maven_version + maven_version="$(parse_version < pom.xml)" + # We do not want to expand ${revision} here, see https://maven.apache.org/maven-ci-friendly.html + # If we use ${revision} as placeholder, the way to bump maven version will be different + # shellcheck disable=SC2016 + if [[ "${maven_version}" == '${revision}' ]]; then use_revision='true' - elif [ "${PROJECT}" = 'hbase-filesystem' ] ; then - use_revision='false' - elif [ "${PROJECT}" = 'hbase-operator-tools' ] ; then - use_revision='false' fi if [ "${use_revision}" = 'false' ] ; then From ba40de13175d842909370f23c1373da48b6badc9 Mon Sep 17 00:00:00 2001 From: LiangJun He <2005hithlj@163.com> Date: Sat, 29 Oct 2022 21:41:18 +0800 Subject: [PATCH 9/9] HBASE-27448 Add an admin method to get replication enabled state --- .../org/apache/hadoop/hbase/client/Admin.java | 8 ++++ .../hbase/client/AdminOverAsyncAdmin.java | 5 ++ .../hadoop/hbase/client/AsyncAdmin.java | 8 ++++ .../hadoop/hbase/client/AsyncHBaseAdmin.java | 5 ++ .../hbase/client/RawAsyncHBaseAdmin.java | 14 ++++++ .../main/protobuf/server/master/Master.proto | 10 ++++ .../hbase/master/MasterRpcServices.java | 14 ++++++ .../replication/ReplicationPeerManager.java | 9 ++++ .../TestGetReplicationPeerState.java | 48 +++++++++++++++++++ .../hbase/rsgroup/VerifyingRSGroupAdmin.java | 5 ++ .../hbase/thrift2/client/ThriftAdmin.java | 5 ++ 11 files changed, 131 insertions(+) create mode 100644 hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestGetReplicationPeerState.java diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java index f5da0aa0bde7..d748ee75fd2a 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java @@ -2555,4 +2555,12 @@ List 
getLogEntries(Set serverNames, String logType, Server * Flush master local region */ void flushMasterStore() throws IOException; + + /** + * Check if a replication peer is enabled. + * @param peerId id of replication peer to check + * @return true if replication peer is enabled + * @throws IOException if a remote or network exception occurs + */ + boolean isReplicationPeerEnabled(String peerId) throws IOException; } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AdminOverAsyncAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AdminOverAsyncAdmin.java index 9e2b990d91c1..8fe15c47657f 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AdminOverAsyncAdmin.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AdminOverAsyncAdmin.java @@ -1084,4 +1084,9 @@ public List getLogEntries(Set serverNames, String logType, public void flushMasterStore() throws IOException { get(admin.flushMasterStore()); } + + @Override + public boolean isReplicationPeerEnabled(String peerId) throws IOException { + return get(admin.isReplicationPeerEnabled(peerId)); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java index 6070c553f5e1..fb201411c208 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java @@ -1776,4 +1776,12 @@ CompletableFuture> getLogEntries(Set serverNames, Str * Flush master local region */ CompletableFuture flushMasterStore(); + + /** + * Check if a replication peer is enabled. + * @param peerId id of replication peer to check + * @return true if replication peer is enabled. The return value will be wrapped by a + * {@link CompletableFuture}. 
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.java
index a8f93dd506d4..841fe4b1df9f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.java
@@ -959,4 +959,9 @@ public CompletableFuture<List<LogEntry>> getLogEntries(Set<ServerName> serverNam
   public CompletableFuture<Void> flushMasterStore() {
     return wrap(rawAdmin.flushMasterStore());
   }
+
+  @Override
+  public CompletableFuture<Boolean> isReplicationPeerEnabled(String peerId) {
+    return wrap(rawAdmin.isReplicationPeerEnabled(peerId));
+  }
 }
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
index 4d614907326e..83a88b59a106 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
@@ -192,6 +192,8 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProceduresRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProceduresResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetReplicationPeerStateRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetReplicationPeerStateResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse;
@@ -4284,4 +4286,16 @@ Void> call(controller, stub, request.build(),
         (s, c, req, done) -> s.flushMasterStore(c, req, done), resp -> null))
       .call();
   }
+
+  @Override
+  public CompletableFuture<Boolean> isReplicationPeerEnabled(String peerId) {
+    GetReplicationPeerStateRequest.Builder request = GetReplicationPeerStateRequest.newBuilder();
+    request.setPeerId(peerId);
+    return this.<Boolean> newMasterCaller()
+      .action((controller, stub) -> this.<GetReplicationPeerStateRequest,
+        GetReplicationPeerStateResponse, Boolean> call(controller, stub, request.build(),
+        (s, c, req, done) -> s.isReplicationPeerEnabled(c, req, done),
+        resp -> resp.getIsEnabled()))
+      .call();
+  }
 }
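The request/response pair used above comes from the Master.proto change in the next hunk. Once the proto is compiled, the generated messages follow the usual protobuf builder pattern; a small self-contained sketch (the peer id and enabled flag are example values):

    import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetReplicationPeerStateRequest;
    import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetReplicationPeerStateResponse;

    public class ProtoShape {
      public static void main(String[] args) {
        // peer_id is the only (required) request field.
        GetReplicationPeerStateRequest req =
          GetReplicationPeerStateRequest.newBuilder().setPeerId("peer_1").build();
        // A response as the master would build it for an enabled peer.
        GetReplicationPeerStateResponse resp =
          GetReplicationPeerStateResponse.newBuilder().setIsEnabled(true).build();
        System.out.println(req.getPeerId() + " -> " + resp.getIsEnabled());
      }
    }
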
diff --git a/hbase-protocol-shaded/src/main/protobuf/server/master/Master.proto b/hbase-protocol-shaded/src/main/protobuf/server/master/Master.proto
index 257abe8f11ca..8285c62c055a 100644
--- a/hbase-protocol-shaded/src/main/protobuf/server/master/Master.proto
+++ b/hbase-protocol-shaded/src/main/protobuf/server/master/Master.proto
@@ -761,6 +761,13 @@ message ModifyColumnStoreFileTrackerResponse {
 message FlushMasterStoreRequest {}
 message FlushMasterStoreResponse {}
 
+message GetReplicationPeerStateRequest {
+  required string peer_id = 1;
+}
+message GetReplicationPeerStateResponse {
+  required bool is_enabled = 1;
+}
+
 service MasterService {
   /** Used by the client to get the number of regions that have received the updated schema */
   rpc GetSchemaAlterStatus(GetSchemaAlterStatusRequest)
@@ -1203,6 +1210,9 @@ service MasterService {
 
   rpc FlushMasterStore(FlushMasterStoreRequest)
     returns(FlushMasterStoreResponse);
+
+  rpc IsReplicationPeerEnabled(GetReplicationPeerStateRequest)
+    returns(GetReplicationPeerStateResponse);
 }
 
 // HBCK Service definitions.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
index 4a490b1e127c..5ef7bf60a1af 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
@@ -246,6 +246,8 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProceduresRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProceduresResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetReplicationPeerStateRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetReplicationPeerStateResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse;
@@ -3491,4 +3493,16 @@ public FlushMasterStoreResponse flushMasterStore(RpcController controller,
     }
     return FlushMasterStoreResponse.newBuilder().build();
   }
+
+  @Override
+  public GetReplicationPeerStateResponse isReplicationPeerEnabled(RpcController controller,
+    GetReplicationPeerStateRequest request) throws ServiceException {
+    boolean isEnabled;
+    try {
+      isEnabled = server.getReplicationPeerManager().getPeerState(request.getPeerId());
+    } catch (ReplicationException ioe) {
+      throw new ServiceException(ioe);
+    }
+    return GetReplicationPeerStateResponse.newBuilder().setIsEnabled(isEnabled).build();
+  }
 }
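Note the error contract implied by the server-side code above: an unknown peer id is reported as an exception, not as "disabled". A hedged sketch of defensive client code follows; that the failure surfaces as an IOException on the caller side is an assumption of this sketch, not something this patch spells out:

    import java.io.IOException;
    import org.apache.hadoop.hbase.client.Admin;

    public final class PeerStateProbe {
      private PeerStateProbe() {
      }

      // Treats "peer does not exist" (and any other remote failure) as not enabled.
      public static boolean isEnabledOrFalse(Admin admin, String peerId) {
        try {
          return admin.isReplicationPeerEnabled(peerId);
        } catch (IOException e) {
          return false;
        }
      }
    }
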
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java
index 0d4e11197cd1..06cf559d4923 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java
@@ -271,6 +271,15 @@ private void setPeerState(String peerId, boolean enabled) throws ReplicationExce
       desc.getSyncReplicationState()));
   }
 
+  public boolean getPeerState(String peerId) throws ReplicationException {
+    ReplicationPeerDescription desc = peers.get(peerId);
+    if (desc != null) {
+      return desc.isEnabled();
+    } else {
+      throw new ReplicationException("Replication Peer of " + peerId + " does not exist.");
+    }
+  }
+
   public void enablePeer(String peerId) throws ReplicationException {
     setPeerState(peerId, true);
   }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestGetReplicationPeerState.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestGetReplicationPeerState.java
new file mode 100644
index 000000000000..af38cdbc16fa
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestGetReplicationPeerState.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.replication;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.ReplicationTests;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category({ ReplicationTests.class, LargeTests.class })
+public class TestGetReplicationPeerState extends TestReplicationBase {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+    HBaseClassTestRule.forClass(TestGetReplicationPeerState.class);
+
+  @Test
+  public void testGetReplicationPeerState() throws Exception {
+
+    // Test disable replication peer
+    hbaseAdmin.disableReplicationPeer("2");
+    assertFalse(hbaseAdmin.isReplicationPeerEnabled("2"));
+
+    // Test enable replication peer
+    hbaseAdmin.enableReplicationPeer("2");
+    assertTrue(hbaseAdmin.isReplicationPeerEnabled("2"));
+  }
+}
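The new test covers the happy path only. A possible follow-up assertion, shown here purely as a sketch and not part of this patch, would pin down the behavior for an unknown peer id; that the error reaches the caller as an IOException, and that JUnit's assertThrows is available in the test classpath, are assumptions:

    import static org.junit.Assert.assertThrows;

    import java.io.IOException;
    import org.junit.Test;

    public class TestUnknownPeerSketch extends TestReplicationBase {
      @Test
      public void testUnknownPeer() {
        // "no_such_peer" was never added, so the master should reject the query
        // rather than report a state.
        assertThrows(IOException.class,
          () -> hbaseAdmin.isReplicationPeerEnabled("no_such_peer"));
      }
    }
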
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/rsgroup/VerifyingRSGroupAdmin.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/rsgroup/VerifyingRSGroupAdmin.java
index 3c0658455f3a..a96c8a918c9f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/rsgroup/VerifyingRSGroupAdmin.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/rsgroup/VerifyingRSGroupAdmin.java
@@ -939,4 +939,9 @@ public Future<Void> modifyTableStoreFileTrackerAsync(TableName tableName, String
   public void flushMasterStore() throws IOException {
     admin.flushMasterStore();
   }
+
+  @Override
+  public boolean isReplicationPeerEnabled(String peerId) throws IOException {
+    return admin.isReplicationPeerEnabled(peerId);
+  }
 }
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/client/ThriftAdmin.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/client/ThriftAdmin.java
index 13a1b9920ecf..1e8c0b2d1049 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/client/ThriftAdmin.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/client/ThriftAdmin.java
@@ -1314,4 +1314,9 @@ public Future<Void> modifyTableStoreFileTrackerAsync(TableName tableName, String
   public void flushMasterStore() throws IOException {
     throw new NotImplementedException("flushMasterStore not supported in ThriftAdmin");
   }
+
+  @Override
+  public boolean isReplicationPeerEnabled(String peerId) throws IOException {
+    throw new NotImplementedException("isReplicationPeerEnabled not supported in ThriftAdmin");
+  }
 }
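ThriftAdmin deliberately leaves the new method unsupported, consistent with the other admin operations it rejects. Callers that may be handed a ThriftAdmin instance could guard for this; a sketch only, and the assumption here is that NotImplementedException is the commons-lang3 runtime exception used in the hunk above:

    import java.io.IOException;
    import org.apache.commons.lang3.NotImplementedException;
    import org.apache.hadoop.hbase.client.Admin;

    public final class ThriftSafePeerCheck {
      private ThriftSafePeerCheck() {
      }

      // Returns null when the underlying Admin implementation does not support the call,
      // so callers can distinguish "unsupported" from a real enabled/disabled answer.
      public static Boolean tryIsPeerEnabled(Admin admin, String peerId) throws IOException {
        try {
          return admin.isReplicationPeerEnabled(peerId);
        } catch (NotImplementedException e) {
          return null;
        }
      }
    }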