Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
27 commits
Select commit Hold shift + click to select a range
7daaec0
HDDS-8477. Unit test for Snapdiff using tombstone entries
swamirishi May 23, 2023
a51fb2b
HDDS-8477: Address review comments
swamirishi May 24, 2023
607c185
HDDS-8477: Fix checkstyle
swamirishi May 24, 2023
0611a6e
Merge remote-tracking branch 'apache/master' into HEAD
swamirishi May 24, 2023
964d5a6
HDDS-8477: Fix checkstyle
swamirishi May 24, 2023
ecd5d74
HDDS-8477: Address review comments
swamirishi May 27, 2023
7a1d59e
HDDS-8477: Address review comments
swamirishi May 27, 2023
7b79413
HDDS-8477. Fix testcase failure
swamirishi May 31, 2023
b1b22c0
Merge remote-tracking branch 'apache/master' into HEAD
swamirishi Jun 1, 2023
8150fe7
HDDS-8477. Fix test case
swamirishi Jun 1, 2023
5dd6b1b
HDDS-8477. Initialize counter to avoid null pointer
swamirishi Jun 1, 2023
c6ab89d
HDDS-8477. Fix testcase failure
swamirishi Jun 2, 2023
f2b74a8
HDDS-8477. Fix robot test for snapshot deletion check
swamirishi Jun 2, 2023
0d57996
Merge remote-tracking branch 'apache/master' into HEAD
swamirishi Jun 2, 2023
d0a955f
HDDS-8477. Fix snapshot delete robot test
swamirishi Jun 6, 2023
8e9fc62
Merge remote-tracking branch 'apache/master' into HEAD
swamirishi Jun 6, 2023
e6b9880
HDDS-8477. Address review comments to fix robot test
swamirishi Jun 6, 2023
7d2c15b
HDDS-8477. Address review comments to fix ci
swamirishi Jun 6, 2023
3fac5ab
HDDS-8477. Address review comments
swamirishi Jun 8, 2023
39498aa
HDDS-8477. Add checks to add validation on page response
swamirishi Jun 8, 2023
298e9ac
HDDS-8477. Fix checkstyle issue
swamirishi Jun 8, 2023
10349f3
HDDS-8477. Fix checkstyle issue
swamirishi Jun 8, 2023
746a238
HDDS-8477. Fix checkstyle issue
swamirishi Jun 8, 2023
6d878d4
HDDS-8477. Address review comments
swamirishi Jun 8, 2023
3be8689
HDDS-8477. Address review comments
swamirishi Jun 8, 2023
4c55203
HDDS-8477. Remove loop from robot test
swamirishi Jun 8, 2023
c348853
HDDS-8477. Fix checkstyle for import
swamirishi Jun 8, 2023
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 6 additions & 4 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -153,7 +153,7 @@ jobs:
run: hadoop-ozone/dev-support/checks/build.sh -Dskip.npx -Dskip.installnpx -Djavac.version=${{ matrix.java }}
env:
OZONE_WITH_COVERAGE: false
CANCEL_NATIVE_VERSION_CHECK: true
SKIP_NATIVE_VERSION_CHECK: true
- name: Delete temporary build artifacts before caching
run: |
#Never cache local artifacts
Expand All @@ -173,12 +173,12 @@ jobs:
steps:
- name: Checkout project
uses: actions/checkout@v3
if: matrix.check != 'bats'
if: matrix.check != 'bats' && matrix.check != 'unit'
- name: Checkout project with history
uses: actions/checkout@v3
with:
fetch-depth: 0
if: matrix.check == 'bats'
if: matrix.check == 'bats' || matrix.check == 'unit'
- name: Cache for maven dependencies
uses: actions/cache@v3
with:
Expand Down Expand Up @@ -337,8 +337,10 @@ jobs:
- flaky
fail-fast: false
steps:
- name: Checkout project
- name: Checkout project with history
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Cache for maven dependencies
uses: actions/cache@v3
with:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,8 @@ public class NativeLibraryLoader {
LoggerFactory.getLogger(NativeLibraryLoader.class);
public static final int LIBRARY_SHUTDOWN_HOOK_PRIORITY = 1;
private static final String OS = System.getProperty("os.name").toLowerCase();

public static final String NATIVE_LIB_TMP_DIR = "native.lib.tmp.dir";
private Map<String, Boolean> librariesLoaded;
private static volatile NativeLibraryLoader instance;

Expand Down Expand Up @@ -136,7 +138,8 @@ private Optional<File> copyResourceFromJarToTemp(final String libraryName)

// create a temporary file to copy the library to
final File temp = File.createTempFile(libraryName, getLibOsSuffix(),
new File(""));
new File(Optional.ofNullable(System.getProperty(NATIVE_LIB_TMP_DIR))
.orElse("")));
if (!temp.exists()) {
return Optional.empty();
} else {
Expand Down
23 changes: 23 additions & 0 deletions hadoop-hdds/rocksdb-checkpoint-differ/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,10 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
<groupId>org.apache.ozone</groupId>
<artifactId>hdds-rocks-native</artifactId>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
</dependency>
</dependencies>

<build>
Expand Down Expand Up @@ -201,5 +205,24 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
</plugins>
</build>
</profile>
<profile>
<id>native-testing</id>
<activation>
<property>
<name>rocks_tools_native</name>
</property>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<argLine>${maven-surefire-plugin.argLine} @{argLine} -Djava.library.path=${project.parent.basedir}/rocks-native/target/native/rocksdb</argLine>
</configuration>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>
Original file line number Diff line number Diff line change
Expand Up @@ -88,8 +88,7 @@ public long getEstimatedTotalKeys() throws RocksDBException {
return estimatedTotalKeys;
}

public Stream<String> getKeyStream() throws RocksDBException,
NativeLibraryNotLoadedException, IOException {
public Stream<String> getKeyStream() throws RocksDBException {
// TODO: [SNAPSHOT] Check if default Options and ReadOptions is enough.
final MultipleSstFileIterator<String> itr =
new MultipleSstFileIterator<String>(sstFiles) {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,158 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ozone.rocksdb.util;

import com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hadoop.hdds.StringUtils;
import org.apache.hadoop.hdds.utils.NativeLibraryLoader;
import org.apache.hadoop.hdds.utils.NativeLibraryNotLoadedException;
import org.apache.hadoop.hdds.utils.db.managed.ManagedEnvOptions;
import org.apache.hadoop.hdds.utils.db.managed.ManagedOptions;
import org.apache.hadoop.hdds.utils.db.managed.ManagedSSTDumpTool;
import org.apache.hadoop.hdds.utils.db.managed.ManagedSstFileWriter;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.rocksdb.RocksDBException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;

import static org.apache.hadoop.hdds.utils.NativeConstants.ROCKS_TOOLS_NATIVE_LIBRARY_NAME;

/**
* ManagedSstFileReader tests.
*/
public class TestManagedSstFileReader {

  private static final Logger LOG =
      LoggerFactory.getLogger(TestManagedSstFileReader.class);

  // Key prefix containing all characters, to check if all characters can be
  // written & read from rocksdb through SSTDumptool
  private static final String KEY_PREFIX = IntStream.range(0, 256).boxed()
      .map(i -> String.format("%c", i))
      .collect(Collectors.joining(""));

  /**
   * Writes the given sorted key map into a fresh temporary SST file.
   * A map value of 0 records a tombstone (delete) entry for the key;
   * any other value records a normal put where value bytes == key bytes.
   *
   * @param keys sorted map of key -> 0 (tombstone) or 1 (put)
   * @return absolute path of the created SST file
   */
  private String createRandomSSTFile(TreeMap<String, Integer> keys)
      throws IOException, RocksDBException {
    File file = File.createTempFile("tmp_sst_file", ".sst");
    file.deleteOnExit();

    try (ManagedOptions managedOptions = new ManagedOptions();
         ManagedEnvOptions managedEnvOptions = new ManagedEnvOptions();
         ManagedSstFileWriter sstFileWriter = new ManagedSstFileWriter(
             managedEnvOptions, managedOptions)) {
      sstFileWriter.open(file.getAbsolutePath());
      for (Map.Entry<String, Integer> entry : keys.entrySet()) {
        byte[] keyByte = StringUtils.string2Bytes(entry.getKey());
        if (entry.getValue() == 0) {
          sstFileWriter.delete(keyByte);
        } else {
          sstFileWriter.put(keyByte, keyByte);
        }
      }
      sstFileWriter.finish();
    }
    return file.getAbsolutePath();
  }

  /**
   * Generates keys in [startRange, endRange) with value i % 2, so that
   * even-numbered keys become tombstones and odd-numbered keys become puts.
   */
  private Map<String, Integer> createKeys(int startRange, int endRange) {
    return IntStream.range(startRange, endRange).boxed()
        .collect(Collectors.toMap(i -> KEY_PREFIX + i,
            i -> i % 2));
  }

  /**
   * Creates {@code numberOfFiles} SST files of 1000 keys each with
   * non-overlapping key ranges.
   *
   * @return pair of (all keys with their tombstone/put flag, file paths)
   */
  private Pair<Map<String, Integer>, List<String>> createDummyData(
      int numberOfFiles) throws RocksDBException, IOException {
    List<String> files = new ArrayList<>();
    int numberOfKeysPerFile = 1000;
    Map<String, Integer> keys = new HashMap<>();
    int cnt = 0;
    for (int i = 0; i < numberOfFiles; i++) {
      TreeMap<String, Integer> fileKeys = new TreeMap<>(createKeys(cnt,
          cnt + numberOfKeysPerFile));
      cnt += fileKeys.size();
      String tmpSSTFile = createRandomSSTFile(fileKeys);
      files.add(tmpSSTFile);
      keys.putAll(fileKeys);
    }
    return Pair.of(keys, files);
  }

  /**
   * getKeyStream() (no tombstones) must return exactly the put keys:
   * every streamed key must be a put (value 1), and every key left over
   * afterwards must be a tombstone (value 0).
   */
  @ParameterizedTest
  @ValueSource(ints = {0, 1, 2, 3, 7, 10})
  public void testGetKeyStream(int numberOfFiles)
      throws RocksDBException, IOException {
    Pair<Map<String, Integer>, List<String>> data =
        createDummyData(numberOfFiles);
    List<String> files = data.getRight();
    Map<String, Integer> keys = data.getLeft();
    try (Stream<String> keyStream =
             new ManagedSstFileReader(files).getKeyStream()) {
      keyStream.forEach(key -> {
        // JUnit convention: expected value first, actual second.
        Assertions.assertEquals(Integer.valueOf(1), keys.get(key));
        keys.remove(key);
      });
      keys.values().forEach(
          val -> Assertions.assertEquals(Integer.valueOf(0), val));
    }
  }

  /**
   * getKeyStreamWithTombstone() must return every key, including tombstones.
   * Requires the rocks-tools native library; uses a single-threaded executor
   * to drive the SST dump tool.
   */
  @ParameterizedTest
  @ValueSource(ints = {0, 1, 2, 3, 7, 10})
  public void testGetKeyStreamWithTombstone(int numberOfFiles)
      throws RocksDBException, IOException, NativeLibraryNotLoadedException {
    NativeLibraryLoader.getInstance()
        .loadLibrary(ROCKS_TOOLS_NATIVE_LIBRARY_NAME);
    Pair<Map<String, Integer>, List<String>> data =
        createDummyData(numberOfFiles);
    List<String> files = data.getRight();
    Map<String, Integer> keys = data.getLeft();
    ExecutorService executorService = new ThreadPoolExecutor(0,
        1, 60, TimeUnit.SECONDS,
        new SynchronousQueue<>(), new ThreadFactoryBuilder()
        .setNameFormat("snapshot-diff-manager-sst-dump-tool-TID-%d")
        .build(), new ThreadPoolExecutor.DiscardPolicy());
    ManagedSSTDumpTool sstDumpTool =
        new ManagedSSTDumpTool(executorService, 256);

    try (Stream<String> keyStream = new ManagedSstFileReader(files)
        .getKeyStreamWithTombstone(sstDumpTool)) {
      // Every key (put or tombstone) must appear exactly once in the stream.
      keyStream.forEach(keys::remove);
      Assertions.assertEquals(0, keys.size());
    } finally {
      executorService.shutdown();
    }

  }
}
2 changes: 1 addition & 1 deletion hadoop-ozone/dev-support/checks/build.sh
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ else
MAVEN_OPTIONS="${MAVEN_OPTIONS} -Djacoco.skip"
fi

if [[ "${CANCEL_NATIVE_VERSION_CHECK}" != "true" ]]; then
if [[ "${SKIP_NATIVE_VERSION_CHECK}" != "true" ]]; then
NATIVE_MAVEN_OPTIONS="-Drocks_tools_native"
. "$DIR/native_check.sh"
init_native_maven_opts
Expand Down
10 changes: 9 additions & 1 deletion hadoop-ozone/dev-support/checks/junit.sh
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ set -u -o pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
cd "$DIR/../../.." || exit 1

: ${SKIP_NATIVE_VERSION_CHECK:="false"}
: ${CHECK:="unit"}
: ${ITERATIONS:="1"}
: ${OZONE_WITH_COVERAGE:="false"}
Expand All @@ -29,7 +30,7 @@ if [[ ${ITERATIONS} -le 0 ]]; then
fi

export MAVEN_OPTS="-Xmx4096m $MAVEN_OPTS"
MAVEN_OPTIONS='-B -Dskip.npx -Dskip.installnpx --no-transfer-progress'
MAVEN_OPTIONS='-B -Dskip.npx -Dskip.installnpx -Dnative.lib.tmp.dir=/tmp --no-transfer-progress'

if [[ "${OZONE_WITH_COVERAGE}" != "true" ]]; then
MAVEN_OPTIONS="${MAVEN_OPTIONS} -Djacoco.skip"
Expand All @@ -41,6 +42,13 @@ else
MAVEN_OPTIONS="${MAVEN_OPTIONS} --fail-at-end"
fi

if [[ "${SKIP_NATIVE_VERSION_CHECK}" == "false" ]]; then
NATIVE_MAVEN_OPTIONS="-Drocks_tools_native"
. "$DIR/native_check.sh"
init_native_maven_opts
MAVEN_OPTIONS="${MAVEN_OPTIONS} ${NATIVE_MAVEN_OPTIONS}"
fi

if [[ "${CHECK}" == "integration" ]] || [[ ${ITERATIONS} -gt 1 ]]; then
mvn ${MAVEN_OPTIONS} -DskipTests clean install
fi
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -74,13 +74,13 @@ Attempt to snapshotDiff when snapshot feature is disabled
${output} = Execute and checkrc ozone sh snapshot snapshotDiff /snapvolume-2/snapbucket-1 snapshot1 snapshot2 255
Should contain ${output} NOT_SUPPORTED_OPERATION

# HDDS-8732
#Delete snapshot
# [Tags] finalized-snapshot-tests
# ${output} = Execute ozone sh snapshot delete /snapvolume-1/snapbucket-1 snapshot1
# Should not contain ${output} Failed
# ${output} = Execute ozone sh snapshot ls /snapvolume-1/snapbucket-1
# Should contain ${output} SNAPSHOT_DELETED
Delete snapshot
[Tags] finalized-snapshot-tests
${output} = Execute ozone sh snapshot delete /snapvolume-1/snapbucket-1 snapshot1
Should not contain ${output} Failed

${output} = Execute ozone sh snapshot ls /snapvolume-1/snapbucket-1 | jq '[.[] | select(.name == "snapshot1") | .snapshotStatus] | if length > 0 then .[] else "SNAPSHOT_DELETED" end'
Should contain ${output} SNAPSHOT_DELETED

Attempt to delete when snapshot feature is disabled
[Tags] pre-finalized-snapshot-tests
Expand Down
Loading