Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
41 commits
Select commit Hold shift + click to select a range
33ee470
HBASE-24135 HBase RS Group
ddupg Apr 10, 2020
126c678
HBASE-24161 [flakey test] locking.TestEntityLocks.testEntityLockTimeo…
huaxiangsun Apr 10, 2020
4f17a02
HBASE-24162 Move CHANGES.txt to CHANGES.md. Add RELEASENOTES.md too o…
saintstack Apr 10, 2020
d6e7692
HBASE-24126 Up the container nproc uplimit from 10000 to 12500 (#1450)
saintstack Apr 10, 2020
3d6052a
HBASE-23998. Update license for jetty-client. (#1297) (#1498)
HorizonNet Apr 12, 2020
5179717
HBASE-24140 : Move CandidateGenerator and their implementors out of S…
virajjasani Apr 12, 2020
2afeb8d
HBASE-24168 Align the internal.protobuf.version in hbase-protocol-sha…
ArthurSXL8 Apr 13, 2020
2a93daa
HBASE-24175 [Flakey Tests] TestSecureExportSnapshot FileNotFoundExcep…
saintstack Apr 12, 2020
e4804d7
Revert "HBASE-24126 Up the container nproc uplimit from 10000 to 1250…
saintstack Apr 10, 2020
b842eef
HBASE-24167 [Flakey Tests] TestHRegionWithInMemoryFlush#testWritesWhi…
saintstack Apr 11, 2020
d44853a
HBASE-24158 [Flakey Tests] TestAsyncTableGetMultiThreaded
saintstack Apr 9, 2020
974ccf5
HBASE-24150 Allow module tests run in parallel (#1464)
saintstack Apr 9, 2020
ba8284a
HBASE-24134 Down forked JVM heap size from 2800m to 2200m for jdk8 an…
saintstack Apr 8, 2020
f4ed92f
HBASE-24132 Upgrade to Apache ZooKeeper 3.5.7 (#1453)
francisjjf Apr 13, 2020
81ea87b
HBASE-24126 Up the container nproc uplimit from 10000 to 12500 (#1504)
saintstack Apr 13, 2020
3a724cc
HBASE-24176 user_permission '.*' command failed to show all table per…
songxincun Apr 14, 2020
1204d52
HBASE-24181 Add region info when log messages in HRegion. (#1506)
binlijin Apr 14, 2020
66f62ff
HBASE-23994: Add WebUI to Canary (#1292)
GeorryHuang Apr 14, 2020
c087a52
HBASE-23956 Use less resources running tests (#1266)
saintstack Mar 11, 2020
f4569c3
HBASE-23956 Use less resources running tests (#1266)
saintstack Mar 11, 2020
71bb097
HBASE-23987 NettyRpcClientConfigHelper will not share event loop by d…
Apache9 Mar 14, 2020
434e4ad
HBASE-24112 [RSGroup] Support renaming rsgroup (#1520)
Reidddddd Apr 15, 2020
bcc54e9
HBASE-24175 [Flakey Tests] TestSecureExportSnapshot FileNotFoundExcep…
saintstack Apr 15, 2020
266b6bb
HBASE-24183 [flakey test] replication.TestAddToSerialReplicationPeer …
huaxiangsun Apr 15, 2020
c8999f5
HBASE-24175 [Flakey Tests] TestSecureExportSnapshot FileNotFoundExcep…
saintstack Apr 16, 2020
6d85f23
HBASE-24197 TestHttpServer.testBindAddress failure with latest jetty …
stoty Apr 16, 2020
cd332e8
HBASE-24151 The master server aborted for IllegalThreadStateException…
Gkkkk302 Apr 16, 2020
344b0f2
HBASE-24194 : Refactor anonymous inner classes of BufferedEncodedSeek…
virajjasani Apr 16, 2020
6f937fa
HBASE-24186: RegionMover ignores replicationId (#1512)
BukrosSzabolcs Apr 16, 2020
cccd628
HBASE-24195 : Admin.getRegionServers() should return live servers exc…
virajjasani Apr 16, 2020
5be038e
HBASE-24158 [Flakey Tests] TestAsyncTableGetMultiThreaded
saintstack Apr 16, 2020
c05c5a5
HBASE-24203 Bump git version in Dockerfile (#1538)
petersomogyi Apr 17, 2020
70835d0
MetricsTable#updateFlushTime is wrong (#1539)
gkanade Apr 17, 2020
96a3cb1
HBASE-24148: Upgrade Thrift to 0.13.0: 0.12.0 has outstanding CVEs.
tamaashu Apr 8, 2020
3620d44
HBASE-23833. The relocated hadoop-thirdparty protobuf breaks HBase as…
jojochuang Apr 17, 2020
a298973
HBASE-24182 log when the region is set to closing status (#1509)
ArthurSXL8 Apr 17, 2020
a409cb8
HBASE-24143 [JDK11] Switch default garbage collector from CMS
ndimiduk Apr 15, 2020
ce22902
HBASE-24203 Bump git version in Dockerfile; addendum (#1543)
petersomogyi Apr 17, 2020
73513b1
HBASE-24166 Duplicate implementation for acquireLock between CreateTa…
ddupg Apr 18, 2020
1896536
HBASE-24174 Fix findbugs warning on ServiceAuthorizationManager
Reidddddd Apr 18, 2020
47dd396
HBASE-24135 HBase RS Group
ddupg Apr 10, 2020
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7,088 changes: 7,088 additions & 0 deletions CHANGES.md

Large diffs are not rendered by default.

4,455 changes: 0 additions & 4,455 deletions CHANGES.txt

This file was deleted.

10,017 changes: 10,017 additions & 0 deletions RELEASENOTES.md

Large diffs are not rendered by default.

26 changes: 19 additions & 7 deletions bin/hbase
Original file line number Diff line number Diff line change
Expand Up @@ -134,6 +134,21 @@ if [ -f "$HBASE_HOME/conf/hbase-env-$COMMAND.sh" ]; then
. "$HBASE_HOME/conf/hbase-env-$COMMAND.sh"
fi

# establish a default value for HBASE_OPTS if it's not already set. For now,
# all we set is the garbage collector, chosen by the detected JVM major
# version (see read_java_version/parse_java_major_version in hbase-config.sh).
if [ -z "${HBASE_OPTS}" ] ; then
major_version_number="$(parse_java_major_version "$(read_java_version)")"
case "$major_version_number" in
8|9|10)
HBASE_OPTS="-XX:+UseConcMarkSweepGC"
;;
11|*)
# NOTE(review): '11|*' matches everything not caught above, so JDK 7 and
# earlier would also get G1 here -- presumably unsupported anyway; confirm.
HBASE_OPTS="-XX:+UseG1GC"
;;
esac
# exported so child processes (region server, shell, etc.) inherit the choice
export HBASE_OPTS
fi

add_size_suffix() {
# add an 'm' suffix if the argument is missing one, otherwise use whats there
local val="$1"
Expand Down Expand Up @@ -678,18 +693,15 @@ if [ "${HBASE_JDK11}" != "" ]; then
fi
else
# Use JDK detection
JAVA=$JAVA_HOME/bin/java

version=$($JAVA -version 2>&1 | awk -F '"' '/version/ {print $2}')
# '-' check is for cases such as "13-ea"
version_number=$(echo "$version" | cut -d'.' -f1 | cut -d'-' -f1)
version="$(read_java_version)"
major_version_number="$(parse_java_major_version "$version")"

if [ "${DEBUG}" = "true" ]; then
echo "HBASE_JDK11 not set hence using JDK detection."
echo "Extracted JDK version - ${version}, version_number - ${version_number}"
echo "Extracted JDK version - ${version}, major_version_number - ${major_version_number}"
fi

if [[ "$version_number" -ge "11" ]]; then
if [[ "$major_version_number" -ge "11" ]]; then
if [ "${DEBUG}" = "true" ]; then
echo "Version ${version} is greater-than/equal to 11 hence adding JDK11 jars to classpath."
fi
Expand Down
23 changes: 23 additions & 0 deletions bin/hbase-config.sh
Original file line number Diff line number Diff line change
Expand Up @@ -168,3 +168,26 @@ if [ -z "$JAVA_HOME" ]; then
EOF
exit 1
fi

# Report the JVM's java.runtime.version string (e.g. "1.8.0_232-b09" or
# "11.0.6+10") by querying the system properties of ${JAVA_HOME}/bin/java.
# Relies on JAVA_HOME being set and GREP/SED being exported (conf/hbase-env.sh).
function read_java_version() {
  # 'local' so we do not clobber a caller's "properties" variable.
  local properties
  properties="$("${JAVA_HOME}/bin/java" -XshowSettings:properties -version 2>&1)"
  # Dots escaped so the pattern cannot accidentally match a different property.
  echo "${properties}" | "${GREP}" 'java\.runtime\.version' | head -1 | "${SED}" -e 's/.* = \([^ ]*\)/\1/'
}

# Inspect the system properties exposed by this JVM to identify the major
# version number. Normalize on the popular version number, thus consider JDK
# 1.8 as version "8".
# Inspect the system properties exposed by this JVM to identify the major
# version number. Normalize on the popular version number, thus consider JDK
# 1.8 as version "8". Accepts strings like "1.8.0_232-b09", "11.0.6+10",
# "13-ea" and emits "8", "11", "13" respectively.
function parse_java_major_version() {
  # 'local' so these helpers do not leak into (or clobber) the caller's scope.
  local complete_version version
  complete_version=$1
  # split off suffix version info like '-b10' or '+10' or '_10'
  # careful to not use GNU Sed extensions
  version="$(echo "$complete_version" | "${SED}" -e 's/+/_/g' -e 's/-/_/g' | cut -d'_' -f1)"
  case "$version" in
    1.*)
      # legacy "1.x" scheme (JDK 8 and earlier): major is the second field
      echo "$version" | cut -d'.' -f2
      ;;
    *)
      # modern scheme (JDK 9+): major is the leading field
      echo "$version" | cut -d'.' -f1
      ;;
  esac
}
12 changes: 8 additions & 4 deletions conf/hbase-env.sh
Original file line number Diff line number Diff line change
Expand Up @@ -38,10 +38,10 @@
# export HBASE_OFFHEAPSIZE=1G

# Extra Java runtime options.
# Below are what we set by default. May only work with SUN JVM.
# For more on why as well as other possible settings,
# see http://hbase.apache.org/book.html#performance
export HBASE_OPTS="$HBASE_OPTS -XX:+UseConcMarkSweepGC"
# Default settings are applied according to the detected JVM version. Override these default
# settings by specifying a value here. For more details on possible settings,
# see http://hbase.apache.org/book.html#_jvm_tuning
# export HBASE_OPTS

# Uncomment one of the below three options to enable java garbage collection logging for the server-side processes.

Expand Down Expand Up @@ -137,3 +137,7 @@ export HBASE_OPTS="$HBASE_OPTS -XX:+UseConcMarkSweepGC"
# Tell HBase whether it should include Hadoop's lib when start up,
# the default value is false,means that includes Hadoop's lib.
# export HBASE_DISABLE_HADOOP_CLASSPATH_LOOKUP="true"

# Override text processing tools for use by these launch scripts.
export GREP="${GREP-grep}"
export SED="${SED-sed}"
4 changes: 2 additions & 2 deletions dev-support/docker/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ RUN DEBIAN_FRONTEND=noninteractive apt-get -qq update && \
bash=4.4.18-2ubuntu1.2 \
build-essential=12.4ubuntu1 \
diffutils=1:3.6-1 \
git=1:2.17.1-1ubuntu0.5 \
git=1:2.17.1-1ubuntu0.6 \
rsync=3.1.2-2.1ubuntu1 \
tar=1.29b-2ubuntu0.1 \
wget=1.19.4-1ubuntu2.2 \
Expand Down Expand Up @@ -178,7 +178,7 @@ RUN mkdir -p /usr/lib/jvm && \
# TODO (nd): is this really true? investigate and file a ticket.
ENV SPOTBUGS_HOME '/opt/spotbugs'
ENV MAVEN_HOME '/opt/maven'
ENV MAVEN_OPTS '-Xmx4G'
ENV MAVEN_OPTS '-Xmx3.6G'

CMD ["/bin/bash"]

Expand Down
2 changes: 1 addition & 1 deletion dev-support/flaky-tests/flaky-reporting.Jenkinsfile
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ pipeline {
flaky_args=("${flaky_args[@]}" --urls "${JENKINS_URL}/job/HBase%20Nightly/job/${BRANCH_NAME}" --is-yetus True --max-builds 10)
flaky_args=("${flaky_args[@]}" --urls "${JENKINS_URL}/job/HBase-Flaky-Tests/job/${BRANCH_NAME}" --is-yetus False --max-builds 30)
docker build -t hbase-dev-support dev-support
docker run -v "${WORKSPACE}":/hbase --workdir=/hbase hbase-dev-support python dev-support/flaky-tests/report-flakies.py --mvn -v "${flaky_args[@]}"
docker run --ulimit nproc=12500 -v "${WORKSPACE}":/hbase --workdir=/hbase hbase-dev-support python dev-support/flaky-tests/report-flakies.py --mvn -v "${flaky_args[@]}"
'''
}
}
Expand Down
7 changes: 5 additions & 2 deletions dev-support/hbase-personality.sh
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,7 @@ function personality_globals
# Yetus 0.7.0 enforces limits. Default proclimit is 1000.
# Up it. See HBASE-19902 for how we arrived at this number.
#shellcheck disable=SC2034
PROCLIMIT=10000
PROC_LIMIT=12500

# Set docker container to run with 20g. Default is 4g in yetus.
# See HBASE-19902 for how we arrived at 20g.
Expand Down Expand Up @@ -137,7 +137,10 @@ function personality_modules

clear_personality_queue

extra="-DHBasePatchProcess"
# At a few points, hbase modules can run build, test, etc. in parallel
# Let it happen. Means we'll use more CPU but should be for short bursts.
# https://cwiki.apache.org/confluence/display/MAVEN/Parallel+builds+in+Maven+3
extra="--threads=2 -DHBasePatchProcess"
if [[ "${PATCH_BRANCH}" = branch-1* ]]; then
extra="${extra} -Dhttps.protocols=TLSv1.2"
fi
Expand Down
2 changes: 1 addition & 1 deletion dev-support/hbase_docker.sh
Original file line number Diff line number Diff line change
Expand Up @@ -159,4 +159,4 @@ done
echo "Successfully built ${IMAGE_NAME}."

echo "Starting hbase shell..."
docker run -it ${IMAGE_NAME}
docker run --ulimit nproc=12500 -it ${IMAGE_NAME}
2 changes: 1 addition & 1 deletion dev-support/hbase_docker/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ RUN DEBIAN_FRONTEND=noninteractive apt-get -qq update && \
DEBIAN_FRONTEND=noninteractive apt-get -qq install --no-install-recommends -y \
ca-certificates=20180409 \
curl=7.58.0-2ubuntu3.8 \
git=1:2.17.1-1ubuntu0.5 \
git=1:2.17.1-1ubuntu0.6 \
locales=2.27-3ubuntu1 \
&& \
apt-get clean && \
Expand Down
2 changes: 1 addition & 1 deletion dev-support/hbase_nightly_source-artifact.sh
Original file line number Diff line number Diff line change
Expand Up @@ -172,7 +172,7 @@ fi

cd "${unpack_dir}"
echo "Follow the ref guide section on making a RC: Step 8 Build the binary tarball."
if mvn -DskipTests -Prelease --batch-mode -Dmaven.repo.local="${m2_tarbuild}" clean install \
if mvn --threads=2 -DskipTests -Prelease --batch-mode -Dmaven.repo.local="${m2_tarbuild}" clean install \
assembly:single >"${working_dir}/srctarball_install.log" 2>&1; then
for artifact in "${unpack_dir}"/hbase-assembly/target/hbase-*-bin.tar.gz; do
if [ -f "${artifact}" ]; then
Expand Down
2 changes: 0 additions & 2 deletions dev-support/hbase_nightly_yetus.sh
Original file line number Diff line number Diff line change
Expand Up @@ -65,8 +65,6 @@ YETUS_ARGS=("--sentinel" "${YETUS_ARGS[@]}")
YETUS_ARGS=("--branch=${BRANCH_NAME}" "${YETUS_ARGS[@]}")
YETUS_ARGS=("--tests-filter=${TESTS_FILTER}" "${YETUS_ARGS[@]}")
YETUS_ARGS=("--ignore-unknown-options=true" "${YETUS_ARGS[@]}")
# Why are these not being picked up from hbase-personality?
YETUS_ARGS=("--proclimit=10000" "${YETUS_ARGS[@]}")
YETUS_ARGS=("--dockermemlimit=20g" "${YETUS_ARGS[@]}")

if [[ -n "${EXCLUDE_TESTS_URL}" ]]; then
Expand Down
1 change: 0 additions & 1 deletion dev-support/jenkins_precommit_github_yetus.sh
Original file line number Diff line number Diff line change
Expand Up @@ -102,7 +102,6 @@ YETUS_ARGS+=("--reapermode=kill")
# set relatively high limits for ASF machines
# changing these to higher values may cause problems
# with other jobs on systemd-enabled machines
YETUS_ARGS+=("--proclimit=10000")
YETUS_ARGS+=("--dockermemlimit=20g")
# -1 spotbugs issues that show up prior to the patch being applied
YETUS_ARGS+=("--spotbugs-strict-precheck")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,8 @@
import java.util.HashSet;
import java.util.Set;

import org.apache.hadoop.hbase.util.Pair;
import org.apache.yetus.audience.InterfaceAudience;

import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;

/**
* The administrative API for HBase. Obtain an instance from {@link Connection#getAdmin()} and
* call {@link #close()} when done.
Expand Down Expand Up @@ -1660,7 +1662,28 @@ default Collection<ServerName> getBackupMasters() throws IOException {
* @throws IOException if a remote or network exception occurs
*/
default Collection<ServerName> getRegionServers() throws IOException {
return getClusterMetrics(EnumSet.of(Option.SERVERS_NAME)).getServersName();
return getRegionServers(false);
}

/**
 * Retrieve all current live region servers. When {@code excludeDecommissionedRS} is
 * false the full live-server list is returned; when true, servers reported by
 * {@link #listDecommissionedRegionServers()} are filtered out.
 *
 * @param excludeDecommissionedRS should we exclude decommissioned RS nodes
 * @return all current live region servers including/excluding decommissioned hosts
 * @throws IOException if a remote or network exception occurs
 */
default Collection<ServerName> getRegionServers(boolean excludeDecommissionedRS)
  throws IOException {
  List<ServerName> allServers =
    getClusterMetrics(EnumSet.of(Option.SERVERS_NAME)).getServersName();
  if (!excludeDecommissionedRS) {
    return allServers;
  }
  // Membership test via a Set: List.contains is O(m), which would make this
  // filter O(n*m) on clusters with many decommissioned hosts.
  Set<ServerName> decommissionedRegionServers =
    new HashSet<>(listDecommissionedRegionServers());
  return allServers.stream()
    .filter(s -> !decommissionedRegionServers.contains(s))
    .collect(ImmutableList.toImmutableList());
}

/**
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
/**
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
Expand All @@ -19,7 +19,6 @@

import static org.apache.hadoop.hbase.exceptions.ClientExceptionsUtil.findException;
import static org.apache.hadoop.hbase.exceptions.ClientExceptionsUtil.isMetaClearingException;

import java.util.Arrays;
import java.util.function.Consumer;
import java.util.function.Function;
Expand All @@ -45,7 +44,13 @@ private AsyncRegionLocatorHelper() {
static boolean canUpdateOnError(HRegionLocation loc, HRegionLocation oldLoc) {
// Do not need to update if no such location, or the location is newer, or the location is not
// the same with us
return oldLoc != null && oldLoc.getSeqNum() <= loc.getSeqNum() &&
if (loc == null || loc.getServerName() == null) {
return false;
}
if (oldLoc == null || oldLoc.getServerName() == null) {
return false;
}
return oldLoc.getSeqNum() <= loc.getSeqNum() &&
oldLoc.getServerName().equals(loc.getServerName());
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -81,35 +81,7 @@ public String toString() {

@Override
public EncodedSeeker createSeeker(final HFileBlockDecodingContext decodingCtx) {
return new BufferedEncodedSeeker<SeekerState>(decodingCtx) {
@Override
protected void decodeNext() {
current.keyLength = currentBuffer.getInt();
current.valueLength = currentBuffer.getInt();
current.ensureSpaceForKey();
currentBuffer.get(current.keyBuffer, 0, current.keyLength);
current.valueOffset = currentBuffer.position();
currentBuffer.skip(current.valueLength);
if (includesTags()) {
// Read short as unsigned, high byte first
current.tagsLength = ((currentBuffer.get() & 0xff) << 8) ^ (currentBuffer.get() & 0xff);
currentBuffer.skip(current.tagsLength);
}
if (includesMvcc()) {
current.memstoreTS = ByteBufferUtils.readVLong(currentBuffer);
} else {
current.memstoreTS = 0;
}
current.nextKvOffset = currentBuffer.position();
}

@Override
protected void decodeFirst() {
currentBuffer.skip(Bytes.SIZEOF_INT);
current.lastCommonPrefix = 0;
decodeNext();
}
};
return new SeekerStateBufferedEncodedSeeker(decodingCtx);
}

@Override
Expand All @@ -123,4 +95,41 @@ protected ByteBuffer internalDecodeKeyValues(DataInputStream source, int allocat

return buffer;
}

/**
 * Named replacement for the former anonymous BufferedEncodedSeeker: walks cells
 * laid out as [key length int][value length int][key bytes][value bytes]
 * [optional tags][optional mvcc vlong], advancing currentBuffer as it decodes.
 */
private static class SeekerStateBufferedEncodedSeeker
extends BufferedEncodedSeeker<SeekerState> {

private SeekerStateBufferedEncodedSeeker(HFileBlockDecodingContext decodingCtx) {
super(decodingCtx);
}

// Decode the cell at currentBuffer's position into 'current', leaving the
// buffer positioned at the start of the next cell.
@Override
protected void decodeNext() {
current.keyLength = currentBuffer.getInt();
current.valueLength = currentBuffer.getInt();
current.ensureSpaceForKey();
currentBuffer.get(current.keyBuffer, 0, current.keyLength);
// value is not copied; remember its offset and skip over it
current.valueOffset = currentBuffer.position();
currentBuffer.skip(current.valueLength);
if (includesTags()) {
// Read short as unsigned, high byte first
current.tagsLength = ((currentBuffer.get() & 0xff) << 8) ^ (currentBuffer.get() & 0xff);
currentBuffer.skip(current.tagsLength);
}
if (includesMvcc()) {
current.memstoreTS = ByteBufferUtils.readVLong(currentBuffer);
} else {
current.memstoreTS = 0;
}
current.nextKvOffset = currentBuffer.position();
}

// Position at the first cell: skip the leading int (presumably a block
// header written by the matching encoder -- confirm against it) and decode.
@Override
protected void decodeFirst() {
currentBuffer.skip(Bytes.SIZEOF_INT);
current.lastCommonPrefix = 0;
decodeNext();
}
}

}
Loading