From c097f35e4a3103322a9de875a56f28ec2113fd6d Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Fri, 24 Feb 2023 08:45:39 -0800 Subject: [PATCH 01/51] HDDS-8028: JNI for RocksDB SST Dump tool --- hadoop-hdds/framework/CMakeLists.txt | 19 ++ .../utils/db/managed/ManagedRocksObject.java | 17 ++ hadoop-hdds/pom.xml | 1 + hadoop-hdds/rocks-native/pom.xml | 233 ++++++++++++++++++ hadoop-hdds/rocks-native/src/CMakeLists.txt | 60 +++++ .../hadoop/hdds/utils/NativeConstants.java | 31 +++ .../hdds/utils/NativeLibraryLoader.java | 136 ++++++++++ .../NativeLibraryNotLoadedException.java | 30 +++ .../utils/db/managed/ManagedSSTDumpTool.java | 58 +++++ .../hdds/utils/db/managed/package-info.java | 24 ++ .../hadoop/hdds/utils/package-info.java | 24 ++ .../src/main/native/SSTDumpTool.cpp | 37 +++ hadoop-ozone/common/CMakeLists.txt | 11 + hadoop-ozone/common/pom.xml | 4 + pom.xml | 10 + 15 files changed, 695 insertions(+) create mode 100644 hadoop-hdds/framework/CMakeLists.txt create mode 100644 hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedRocksObject.java create mode 100644 hadoop-hdds/rocks-native/pom.xml create mode 100644 hadoop-hdds/rocks-native/src/CMakeLists.txt create mode 100644 hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/NativeConstants.java create mode 100644 hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/NativeLibraryLoader.java create mode 100644 hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/NativeLibraryNotLoadedException.java create mode 100644 hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpTool.java create mode 100644 hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/package-info.java create mode 100644 hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/package-info.java create mode 100644 hadoop-hdds/rocks-native/src/main/native/SSTDumpTool.cpp create mode 100644 hadoop-ozone/common/CMakeLists.txt diff --git a/hadoop-hdds/framework/CMakeLists.txt b/hadoop-hdds/framework/CMakeLists.txt new file mode 100644 index 000000000000..b9f7d0087ec4 --- /dev/null +++ b/hadoop-hdds/framework/CMakeLists.txt @@ -0,0 +1,19 @@ +cmake_minimum_required(VERSION 3.1 FATAL_ERROR) +project(ozone_native) +set(CMAKE_BUILD_TYPE Release) +find_package(JNI REQUIRED) +include_directories(${JNI_INCLUDE_DIRS}) +set(CMAKE_CXX_STANDARD ${CMAKE_STANDARDS}) +set(linked_libraries "") +if(NOT GENERATED_JAVAH) + message(FATAL_ERROR "You must set the CMake variable GENERATED_JAVAH") +endif() +include_directories(${GENERATED_JAVAH}) +if(${SST_DUMP_INCLUDE}) + include_directories(${ROCKSDB_HEADERS}) + link_directories(${ROCKSDB_LIB}) + set(SOURCE_FILES ${NATIVE_DIR}/SSTDumpTool.cpp) + set(linked_libraries ${linked_libraries} rocksdbjni) +endif() +add_library(ozone_native SHARED ${SOURCE_FILES}) +target_link_libraries(ozone_native ${linked_libraries}) \ No newline at end of file diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedRocksObject.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedRocksObject.java new file mode 100644 index 000000000000..728c72244996 --- /dev/null +++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedRocksObject.java @@ -0,0 +1,17 @@ +package org.apache.hadoop.hdds.utils.db.managed; + +import org.rocksdb.RocksObject; +/** + * Managed RocksObject. 
+ */ +public abstract class ManagedRocksObject extends RocksObject { + protected ManagedRocksObject(long nativeHandle) { + super(nativeHandle); + } + + @Override + protected void finalize() throws Throwable { + ManagedRocksObjectUtils.assertClosed(this); + super.finalize(); + } +} diff --git a/hadoop-hdds/pom.xml b/hadoop-hdds/pom.xml index e93b621c7bca..1aaa21f9f9f6 100644 --- a/hadoop-hdds/pom.xml +++ b/hadoop-hdds/pom.xml @@ -48,6 +48,7 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd"> config test-utils erasurecode + rocks-native diff --git a/hadoop-hdds/rocks-native/pom.xml b/hadoop-hdds/rocks-native/pom.xml new file mode 100644 index 000000000000..33d86da36acd --- /dev/null +++ b/hadoop-hdds/rocks-native/pom.xml @@ -0,0 +1,233 @@ + + + + + hdds + org.apache.ozone + 1.4.0-SNAPSHOT + + 4.0.0 + Apache Ozone HDDS Rocks + hdds-rocks-native + + + + org.apache.ozone + hdds-server-framework + + + + 8 + 8 + 23 + true + + + + + + + com.googlecode.maven-download-plugin + download-maven-plugin + + + rocksdb source download + generate-resources + + wget + + + https://github.com/facebook/rocksdb/archive/refs/tags/v${rocksdb.version}.tar.gz + rocksdb-v${rocksdb.version}.tar.gz + ${project.build.directory}/rocksdb + + + + zlib source download + generate-resources + + wget + + + https://zlib.net/zlib-${zlib.version}.tar.gz + zlib-${zlib.version}.tar.gz + ${project.build.directory}/zlib + + + + bzip2 source download + generate-resources + + wget + + + https://sourceware.org/pub/bzip2/bzip2-${bzip2.version}.tar.gz + bzip2-v${bzip2.version}.tar.gz + ${project.build.directory}/bzip2 + + + + + + maven-antrun-plugin + + + unzip-artifact + generate-resources + + + + + + + + + run + + + + build-zlib + generate-resources + + + + + + + + + + + run + + + + build-bzip2 + generate-resources + + + + + + + run + + + + build-rocksjava + generate-resources + + + + + + + + + + + > + + + + run + + + + + + org.codehaus.mojo + native-maven-plugin + + + compile + + javah + + + ${env.JAVA_HOME}/bin/javah + + org.apache.hadoop.hdds.utils.db.managed.ManagedSSTDumpTool + + ${project.build.directory}/native/javah + + + + + + org.apache.hadoop + hadoop-maven-plugins + + + cmake-compile + compile + cmake-compile + + ${basedir}/src + + ${project.build.directory}/native/javah + ${basedir}/src/main/native + ${sstDump.include} + ${cmake.standards} + ${project.build.directory}/rocksdb/rocksdb-${rocksdb.version}/include + ${project.build.directory}/rocksdb/rocksdb-${rocksdb.version} + ${project.build.directory}/zlib/zlib-${zlib.version} + ${project.build.directory}/bzip2/bzip2-${bzip2.version} + + + + + + + maven-antrun-plugin + + + copy-lib-file + prepare-package + + + + + + + + + run + + + + + + org.apache.maven.plugins + maven-jar-plugin + ${maven-jar-plugin.version} + + + **/*.class + **/lib*.dylib + **/lib*.so + **/lib*.jnilib + **/lib*.dll + + + + + + + + + \ No newline at end of file diff --git a/hadoop-hdds/rocks-native/src/CMakeLists.txt b/hadoop-hdds/rocks-native/src/CMakeLists.txt new file mode 100644 index 000000000000..ce61a4345aa5 --- /dev/null +++ b/hadoop-hdds/rocks-native/src/CMakeLists.txt @@ -0,0 +1,60 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# +# CMake configuration. +# + +cmake_minimum_required(VERSION 3.1 FATAL_ERROR) +project(ozone_native) +set(CMAKE_BUILD_TYPE Release) +find_package(JNI REQUIRED) +include_directories(${JNI_INCLUDE_DIRS}) +set(CMAKE_CXX_STANDARD ${CMAKE_STANDARDS}) +set(linked_libraries "") +if(NOT GENERATED_JAVAH) + message(FATAL_ERROR "You must set the CMake variable GENERATED_JAVAH") +endif() +include_directories(${GENERATED_JAVAH}) +if(${SST_DUMP_INCLUDE}) + include_directories(${ROCKSDB_HEADERS}) + set(SOURCE_FILES ${NATIVE_DIR}/SSTDumpTool.cpp) + ADD_LIBRARY(rocksdb STATIC IMPORTED) + set_target_properties( + rocksdb + PROPERTIES + IMPORTED_LOCATION ${ROCKSDB_LIB}/librocksdb_debug.a) + ADD_LIBRARY(rocks_tools STATIC IMPORTED) + set_target_properties( + rocks_tools + PROPERTIES + IMPORTED_LOCATION ${ROCKSDB_LIB}/librocksdb_tools_debug.a) + ADD_LIBRARY(bz2 STATIC IMPORTED) + set_target_properties( + bz2 + PROPERTIES + IMPORTED_LOCATION ${BZIP2_LIB}/libbz2.a) + ADD_LIBRARY(zlib STATIC IMPORTED) + set_target_properties( + zlib + PROPERTIES + IMPORTED_LOCATION ${ZLIB_LIB}/libz.a) + set(linked_libraries ${linked_libraries} bz2 zlib rocks_tools rocksdb) +endif() +add_library(ozone_rocksdb_tools SHARED ${SOURCE_FILES}) +target_link_libraries(ozone_rocksdb_tools ${linked_libraries}) \ No newline at end of file diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/NativeConstants.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/NativeConstants.java new file mode 100644 index 000000000000..d3121144d37a --- /dev/null +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/NativeConstants.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hdds.utils; + +/** + * Native Constants. 
+ */
+public final class NativeConstants {
+
+  private NativeConstants() {
+
+  }
+  public static final String ROCKS_TOOLS_NATIVE_LIBRARY_NAME
+      = "ozone_rocksdb_tools";
+}
diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/NativeLibraryLoader.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/NativeLibraryLoader.java
new file mode 100644
index 000000000000..0f2ce9b0fee7
--- /dev/null
+++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/NativeLibraryLoader.java
@@ -0,0 +1,136 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.utils;
+
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.file.Files;
+import java.nio.file.StandardCopyOption;
+import java.util.Map;
+import java.util.Optional;
+import java.util.concurrent.ConcurrentHashMap;
+
+/**
+ * Class to load Native Libraries.
+ */
+public class NativeLibraryLoader {
+  private static final String OS = System.getProperty("os.name").toLowerCase();
+  private Map<String, Boolean> librariesLoaded;
+  private static NativeLibraryLoader instance;
+
+  public NativeLibraryLoader(final Map<String, Boolean> librariesLoaded) {
+    this.librariesLoaded = librariesLoaded;
+  }
+
+  public static NativeLibraryLoader getInstance() {
+    if (instance == null) {
+      synchronized (NativeLibraryLoader.class) {
+        if (instance == null) {
+          instance = new NativeLibraryLoader(new ConcurrentHashMap<>());
+        }
+      }
+    }
+    return instance;
+  }
+
+  public static String getJniLibraryFileName(String libraryName) {
+    return appendLibOsSuffix("lib" + libraryName);
+  }
+
+  public static boolean isMac() {
+    return OS.contains("mac");
+  }
+
+  public static boolean isWindows() {
+    return OS.contains("win");
+  }
+
+  private static String getLibOsSuffix() {
+    if (isMac()) {
+      return ".dylib";
+    } else if (isWindows()) {
+      return ".dll";
+    }
+    return ".so";
+  }
+  private static String appendLibOsSuffix(String libraryFileName) {
+    return libraryFileName + getLibOsSuffix();
+  }
+
+  public boolean isLibraryLoaded(final String libraryName) {
+    return librariesLoaded.getOrDefault(libraryName, false);
+  }
+
+  public synchronized boolean loadLibrary(final String libraryName) {
+    if (isLibraryLoaded(libraryName)) {
+      return true;
+    }
+    boolean loaded = false;
+    try {
+      System.loadLibrary(libraryName);
+      loaded = true;
+    } catch (final UnsatisfiedLinkError ule) {
+      // ignore: fall back to loading the bundled library from the jar
+    }
+    if (!loaded) {
+      try {
+        Optional<File> file = copyResourceFromJarToTemp(libraryName);
+        if (file.isPresent()) {
+          System.load(file.get().getAbsolutePath());
+          loaded = true;
+        }
+
+      } catch (IOException e) {
+        // ignore: the library is simply recorded as not loaded
+      }
+
+    }
+    this.librariesLoaded.put(libraryName, loaded);
+    return isLibraryLoaded(libraryName);
+  }
+  private Optional<File> copyResourceFromJarToTemp(final
String libraryName) + throws IOException { + final String libraryFileName = getJniLibraryFileName(libraryName); + InputStream is = null; + try { + is = getClass().getClassLoader().getResourceAsStream(libraryFileName); + if (is == null) { + return Optional.empty(); + } + + // create a temporary file to copy the library to + final File temp = File.createTempFile(libraryName, getLibOsSuffix()); + if (!temp.exists()) { + return Optional.empty(); + } else { + temp.deleteOnExit(); + } + + Files.copy(is, temp.toPath(), StandardCopyOption.REPLACE_EXISTING); + return Optional.ofNullable(temp); + + } finally { + if (is != null) { + is.close(); + } + } + } +} diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/NativeLibraryNotLoadedException.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/NativeLibraryNotLoadedException.java new file mode 100644 index 000000000000..b4313a607dce --- /dev/null +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/NativeLibraryNotLoadedException.java @@ -0,0 +1,30 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hdds.utils; + +/** + Exception when native library not loaded. + */ +public class NativeLibraryNotLoadedException extends Exception { + public NativeLibraryNotLoadedException(String libraryName) { + super(String.format("Unable to load library %s from both " + + "java.library.path & resource file %s from jar.", libraryName, + NativeLibraryLoader.getJniLibraryFileName(libraryName))); + } +} diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpTool.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpTool.java new file mode 100644 index 000000000000..2913e3c48213 --- /dev/null +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpTool.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hdds.utils.db.managed; + +import org.apache.hadoop.hdds.utils.NativeLibraryLoader; +import org.apache.hadoop.hdds.utils.NativeLibraryNotLoadedException; + +import java.util.Map; + +import static org.apache.hadoop.hdds.utils.NativeConstants.ROCKS_TOOLS_NATIVE_LIBRARY_NAME; + +/** + * JNI for RocksDB SSTDumpTool. + */ +public class ManagedSSTDumpTool { + + static { + NativeLibraryLoader.getInstance() + .loadLibrary(ROCKS_TOOLS_NATIVE_LIBRARY_NAME); + } + + public ManagedSSTDumpTool() throws NativeLibraryNotLoadedException { + if (!NativeLibraryLoader.getInstance() + .isLibraryLoaded(ROCKS_TOOLS_NATIVE_LIBRARY_NAME)) { + throw new NativeLibraryNotLoadedException( + ROCKS_TOOLS_NATIVE_LIBRARY_NAME); + } + + } + + public void run(String[] args, ManagedOptions options) { + this.runInternal(args, options.getNativeHandle()); + } + + public void run(Map args, ManagedOptions options) { + this.run(args.entrySet().stream().map(e -> "--" + + (e.getValue() == null || e.getValue().isEmpty() ? e.getKey() : + e.getKey() + "=" + e.getValue())).toArray(String[]::new), options); + } + + private native void runInternal(String[] args, long optionsNativeHandle); +} diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/package-info.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/package-info.java new file mode 100644 index 000000000000..310afc30ca80 --- /dev/null +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/package-info.java @@ -0,0 +1,24 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Annotation processors used at compile time by the Ozone project to validate + * internal annotations and related code as needed, if needed. + */ + +package org.apache.hadoop.hdds.utils.db.managed; \ No newline at end of file diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/package-info.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/package-info.java new file mode 100644 index 000000000000..2519c13b256a --- /dev/null +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/package-info.java @@ -0,0 +1,24 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Annotation processors used at compile time by the Ozone project to validate + * internal annotations and related code as needed, if needed. + */ + +package org.apache.hadoop.hdds.utils; \ No newline at end of file diff --git a/hadoop-hdds/rocks-native/src/main/native/SSTDumpTool.cpp b/hadoop-hdds/rocks-native/src/main/native/SSTDumpTool.cpp new file mode 100644 index 000000000000..e9ac71bbd31f --- /dev/null +++ b/hadoop-hdds/rocks-native/src/main/native/SSTDumpTool.cpp @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "org_apache_hadoop_hdds_utils_db_managed_ManagedSSTDumpTool.h" +#include "rocksdb/options.h" +#include "rocksdb/sst_dump_tool.h" +#include "string" + +void Java_org_apache_hadoop_hdds_utils_db_managed_ManagedSSTDumpTool_runInternal(JNIEnv *env, jobject obj, jobjectArray argsArray, + jlong options_native_handle) { + auto* options = reinterpret_cast(options_native_handle); + ROCKSDB_NAMESPACE::SSTDumpTool dumpTool; + int length = env->GetArrayLength(argsArray); + const char* args[length + 1]; + args[0] = strdup("./sst_dump"); + for(int i = 0; i < env->GetArrayLength(argsArray); i++) { + + args[i+1] = (char*)env->GetStringUTFChars((jstring)env-> + GetObjectArrayElement(argsArray, (jsize)i), JNI_FALSE); + } + dumpTool.Run(length + 1, args, *options); +} \ No newline at end of file diff --git a/hadoop-ozone/common/CMakeLists.txt b/hadoop-ozone/common/CMakeLists.txt new file mode 100644 index 000000000000..880a97237025 --- /dev/null +++ b/hadoop-ozone/common/CMakeLists.txt @@ -0,0 +1,11 @@ +cmake_minimum_required(VERSION 3.24) +project(sst_file_dump) +set(CMAKE_BUILD_TYPE Release) +find_package(JNI REQUIRED) +include_directories(${JNI_INCLUDE_DIRS}) +set(CMAKE_CXX_STANDARD 23) +include_directories(/opt/homebrew/Cellar/rocksdb/7.9.2/include) +link_directories(/opt/homebrew/Cellar/rocksdb/7.9.2/lib) +set(SOURCE_FILES src/main/native/SSTDumpTool.cpp src/main/native/SSTDumpTool.h src/main/native/cplusplus_to_java_convert.h) +add_library(sst_file_dump SHARED ${SOURCE_FILES}) +target_link_libraries(sst_file_dump rocksdb) diff --git a/hadoop-ozone/common/pom.xml b/hadoop-ozone/common/pom.xml index b3ebf71fc016..0302519b5863 100644 --- a/hadoop-ozone/common/pom.xml +++ b/hadoop-ozone/common/pom.xml @@ -107,6 +107,10 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd"> 
org.junit.jupiter junit-jupiter-params + + org.apache.ozone + hdds-server-framework + diff --git a/pom.xml b/pom.xml index 8927a0757c2b..842b6023608b 100644 --- a/pom.xml +++ b/pom.xml @@ -126,6 +126,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xs 2.6.0 1.4 1.6 + 1.6.8 ${project.build.directory}/test-dir ${test.build.dir} @@ -292,6 +293,10 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xs 1.14.0 2.0.0 4.2.0 + 1.0.8 + 1.2.13 + + @@ -1533,6 +1538,11 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xs curator-client ${curator.version} + + com.googlecode.maven-download-plugin + download-maven-plugin + ${download-maven-plugin.version} + From b14834b9fa00a712c8985f57ab5a95d433d2fcad Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Fri, 24 Feb 2023 08:51:41 -0800 Subject: [PATCH 02/51] HDDS-8028: Remove duplicate files --- hadoop-hdds/framework/CMakeLists.txt | 19 ------------------- hadoop-ozone/common/CMakeLists.txt | 11 ----------- 2 files changed, 30 deletions(-) delete mode 100644 hadoop-hdds/framework/CMakeLists.txt delete mode 100644 hadoop-ozone/common/CMakeLists.txt diff --git a/hadoop-hdds/framework/CMakeLists.txt b/hadoop-hdds/framework/CMakeLists.txt deleted file mode 100644 index b9f7d0087ec4..000000000000 --- a/hadoop-hdds/framework/CMakeLists.txt +++ /dev/null @@ -1,19 +0,0 @@ -cmake_minimum_required(VERSION 3.1 FATAL_ERROR) -project(ozone_native) -set(CMAKE_BUILD_TYPE Release) -find_package(JNI REQUIRED) -include_directories(${JNI_INCLUDE_DIRS}) -set(CMAKE_CXX_STANDARD ${CMAKE_STANDARDS}) -set(linked_libraries "") -if(NOT GENERATED_JAVAH) - message(FATAL_ERROR "You must set the CMake variable GENERATED_JAVAH") -endif() -include_directories(${GENERATED_JAVAH}) -if(${SST_DUMP_INCLUDE}) - include_directories(${ROCKSDB_HEADERS}) - link_directories(${ROCKSDB_LIB}) - set(SOURCE_FILES ${NATIVE_DIR}/SSTDumpTool.cpp) - set(linked_libraries ${linked_libraries} rocksdbjni) -endif() -add_library(ozone_native SHARED ${SOURCE_FILES}) -target_link_libraries(ozone_native ${linked_libraries}) \ No newline at end of file diff --git a/hadoop-ozone/common/CMakeLists.txt b/hadoop-ozone/common/CMakeLists.txt deleted file mode 100644 index 880a97237025..000000000000 --- a/hadoop-ozone/common/CMakeLists.txt +++ /dev/null @@ -1,11 +0,0 @@ -cmake_minimum_required(VERSION 3.24) -project(sst_file_dump) -set(CMAKE_BUILD_TYPE Release) -find_package(JNI REQUIRED) -include_directories(${JNI_INCLUDE_DIRS}) -set(CMAKE_CXX_STANDARD 23) -include_directories(/opt/homebrew/Cellar/rocksdb/7.9.2/include) -link_directories(/opt/homebrew/Cellar/rocksdb/7.9.2/lib) -set(SOURCE_FILES src/main/native/SSTDumpTool.cpp src/main/native/SSTDumpTool.h src/main/native/cplusplus_to_java_convert.h) -add_library(sst_file_dump SHARED ${SOURCE_FILES}) -target_link_libraries(sst_file_dump rocksdb) From 68b98c4712a8cd7cdfa1d845adab002c0d1cf241 Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Fri, 24 Feb 2023 08:53:18 -0800 Subject: [PATCH 03/51] HDDS-8028: Add doc --- .../utils/db/managed/ManagedRocksObject.java | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedRocksObject.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedRocksObject.java index 728c72244996..396949a8643e 100644 --- 
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedRocksObject.java +++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedRocksObject.java @@ -1,3 +1,21 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + package org.apache.hadoop.hdds.utils.db.managed; import org.rocksdb.RocksObject; From 3e9db6e4314d332fd5c84c33b1b2024f5524e675 Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Sat, 25 Feb 2023 22:43:49 -0800 Subject: [PATCH 04/51] HDDS-8028: Link Compresion Libraries while building Rocksdb --- hadoop-hdds/rocks-native/pom.xml | 103 ++++++++++++++++-- hadoop-hdds/rocks-native/src/CMakeLists.txt | 17 ++- .../utils/db/managed/ManagedSSTDumpTool.java | 8 ++ pom.xml | 4 +- 4 files changed, 121 insertions(+), 11 deletions(-) diff --git a/hadoop-hdds/rocks-native/pom.xml b/hadoop-hdds/rocks-native/pom.xml index 33d86da36acd..455f1e00a4b9 100644 --- a/hadoop-hdds/rocks-native/pom.xml +++ b/hadoop-hdds/rocks-native/pom.xml @@ -46,7 +46,7 @@ rocksdb source download - generate-resources + generate-sources wget @@ -58,7 +58,7 @@ zlib source download - generate-resources + generate-sources wget @@ -70,7 +70,7 @@ bzip2 source download - generate-resources + generate-sources wget @@ -80,28 +80,73 @@ ${project.build.directory}/bzip2 + + lz4 source download + generate-sources + + wget + + + https://github.com/lz4/lz4/archive/refs/tags/v${lz4.version}.tar.gz + lz4-v${lz4.version}.tar.gz + ${project.build.directory}/lz4 + + + + snappy source download + generate-sources + + wget + + + https://github.com/google/snappy/archive/refs/tags/${snappy.version}.tar.gz + snappy-v${snappy.version}.tar.gz + ${project.build.directory}/snappy + + + + zstd source download + generate-sources + + wget + + + https://github.com/facebook/zstd/archive/refs/tags/v${zstd.version}.tar.gz + zstd-v${zstd.version}.tar.gz + ${project.build.directory}/zstd + + + - maven-antrun-plugin + maven-antrun-plugin unzip-artifact - generate-resources + generate-sources + + + run + + + + maven-antrun-plugin + build-zlib - generate-resources + generate-sources @@ -117,7 +162,7 @@ build-bzip2 - generate-resources + generate-sources @@ -127,6 +172,30 @@ run + + build-lz4 + generate-sources + + + + + + + run + + + + build-zstd + generate-sources + + + + + + + run + + build-rocksjava generate-resources @@ -138,6 +207,8 @@ + + > @@ -173,11 +244,22 @@ hadoop-maven-plugins - cmake-compile + cmake-compile-snappy + generate-sources + cmake-compile + + ${project.build.directory}/snappy/snappy-${snappy.version} + ${project.build.directory}/snappy/lib + + + + + cmake-compile-rockstools compile cmake-compile ${basedir}/src + ${project.build.directory}/native/rocksdb ${project.build.directory}/native/javah 
${basedir}/src/main/native @@ -187,6 +269,9 @@ ${project.build.directory}/rocksdb/rocksdb-${rocksdb.version} ${project.build.directory}/zlib/zlib-${zlib.version} ${project.build.directory}/bzip2/bzip2-${bzip2.version} + ${project.build.directory}/lz4/lz4-${lz4.version}/lib + ${project.build.directory}/snappy/lib + ${project.build.directory}/zstd/zstd-${zstd.version}/lib @@ -201,7 +286,7 @@ - + diff --git a/hadoop-hdds/rocks-native/src/CMakeLists.txt b/hadoop-hdds/rocks-native/src/CMakeLists.txt index ce61a4345aa5..a49a49735fd5 100644 --- a/hadoop-hdds/rocks-native/src/CMakeLists.txt +++ b/hadoop-hdds/rocks-native/src/CMakeLists.txt @@ -54,7 +54,22 @@ if(${SST_DUMP_INCLUDE}) zlib PROPERTIES IMPORTED_LOCATION ${ZLIB_LIB}/libz.a) - set(linked_libraries ${linked_libraries} bz2 zlib rocks_tools rocksdb) + ADD_LIBRARY(lz4 STATIC IMPORTED) + set_target_properties( + lz4 + PROPERTIES + IMPORTED_LOCATION ${LZ4_LIB}/liblz4.a) + ADD_LIBRARY(snappy STATIC IMPORTED) + set_target_properties( + snappy + PROPERTIES + IMPORTED_LOCATION ${SNAPPY_LIB}/libsnappy.a) + ADD_LIBRARY(zstd STATIC IMPORTED) + set_target_properties( + zstd + PROPERTIES + IMPORTED_LOCATION ${ZSTD_LIB}/libzstd.a) + set(linked_libraries ${linked_libraries} bz2 zlib rocks_tools rocksdb lz4 snappy zstd) endif() add_library(ozone_rocksdb_tools SHARED ${SOURCE_FILES}) target_link_libraries(ozone_rocksdb_tools ${linked_libraries}) \ No newline at end of file diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpTool.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpTool.java index 2913e3c48213..2eb1d7352280 100644 --- a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpTool.java +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpTool.java @@ -21,6 +21,7 @@ import org.apache.hadoop.hdds.utils.NativeLibraryLoader; import org.apache.hadoop.hdds.utils.NativeLibraryNotLoadedException; +import java.util.HashMap; import java.util.Map; import static org.apache.hadoop.hdds.utils.NativeConstants.ROCKS_TOOLS_NATIVE_LIBRARY_NAME; @@ -55,4 +56,11 @@ public void run(Map args, ManagedOptions options) { } private native void runInternal(String[] args, long optionsNativeHandle); + + public static void main(String[] args) throws NativeLibraryNotLoadedException { + Map commandOpts = new HashMap<>(); + commandOpts.put("file","/Users/sbalachandran/Documents/code/dummyrocks/rocks"); + commandOpts.put("command","scan"); + new ManagedSSTDumpTool().run(commandOpts, new ManagedOptions()); + } } diff --git a/pom.xml b/pom.xml index 842b6023608b..7851f65d4134 100644 --- a/pom.xml +++ b/pom.xml @@ -295,7 +295,9 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xs 4.2.0 1.0.8 1.2.13 - + 1.9.3 + 1.1.8 + 1.4.9 From 68550ff50c00a4a6e6e43b4238994c32ffa3789d Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Mon, 27 Feb 2023 08:02:07 -0800 Subject: [PATCH 05/51] HDDS-8028: Add SSTDumptool Iterator --- .../hadoop/hdds/utils/HddsServerUtil.java | 72 ++++--- .../hadoop/hdds/utils/MessageQueue.java | 188 +++++++++++++++++ .../db/managed/ManagedSSTDumpIterator.java | 193 ++++++++++++++++++ hadoop-hdds/rocks-native/pom.xml | 4 - .../utils/db/managed/ManagedSSTDumpTool.java | 20 +- .../src/main/native/SSTDumpTool.cpp | 6 +- 6 files changed, 441 insertions(+), 42 deletions(-) create mode 100644 
hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/MessageQueue.java create mode 100644 hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java index 33d8c178c72b..3e912d887d80 100644 --- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java +++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java @@ -17,22 +17,8 @@ package org.apache.hadoop.hdds.utils; -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.OutputStream; -import java.net.InetSocketAddress; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Optional; -import java.util.OptionalInt; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; -import java.util.stream.Stream; - import com.google.common.base.Strings; +import com.google.common.collect.Lists; import com.google.protobuf.BlockingService; import org.apache.commons.compress.archivers.ArchiveEntry; import org.apache.commons.compress.archivers.ArchiveOutputStream; @@ -63,6 +49,25 @@ import org.apache.hadoop.ozone.OzoneConfigKeys; import org.apache.hadoop.ozone.OzoneConsts; import org.apache.hadoop.security.UserGroupInformation; +import org.rocksdb.RocksDBException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.net.InetSocketAddress; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Optional; +import java.util.OptionalInt; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.Stream; import static org.apache.hadoop.hdds.DFSConfigKeysLegacy.DFS_DATANODE_DATA_DIR_KEY; import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_HEARTBEAT_INTERVAL; @@ -78,23 +83,19 @@ import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_HEARTBEAT_LOG_WARN_DEFAULT; import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_HEARTBEAT_LOG_WARN_INTERVAL_COUNT; import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL; -import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_HEARTBEAT_RPC_TIMEOUT; -import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_HEARTBEAT_RPC_TIMEOUT_DEFAULT; import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_HEARTBEAT_RPC_RETRY_COUNT; import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_HEARTBEAT_RPC_RETRY_COUNT_DEFAULT; import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_HEARTBEAT_RPC_RETRY_INTERVAL; import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_HEARTBEAT_RPC_RETRY_INTERVAL_DEFAULT; -import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_STALENODE_INTERVAL; -import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_STALENODE_INTERVAL_DEFAULT; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_HEARTBEAT_RPC_TIMEOUT; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_HEARTBEAT_RPC_TIMEOUT_DEFAULT; import static 
org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_INFO_WAIT_DURATION;
 import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_INFO_WAIT_DURATION_DEFAULT;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_STALENODE_INTERVAL;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_STALENODE_INTERVAL_DEFAULT;
 import static org.apache.hadoop.hdds.server.ServerUtils.sanitizeUserArgs;
 import static org.apache.hadoop.ozone.OzoneConfigKeys.HDDS_DATANODE_CONTAINER_DB_DIR;
 
-import org.rocksdb.RocksDBException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 /**
  * Hdds stateless helper functions for server side components.
  */
@@ -583,4 +584,29 @@ public static IOException toIOException(String msg, RocksDBException e) {
   public static void addSuppressedLoggingExceptions(RPC.Server server) {
     server.addSuppressedLoggingExceptions(ServerNotLeaderException.class);
   }
+
+  /**
+   * Builds a ProcessBuilder that launches a separate Java process.
+   * @param jvmArgs JVM arguments for the child process
+   * @param classpath classpath to launch the child process with
+   * @param className fully qualified name of the main class to run
+   * @param args program arguments passed to the main class
+   * @return ProcessBuilder for the assembled java command
+   */
+  public static ProcessBuilder getJavaProcess(List<String> jvmArgs,
+                                              String classpath,
+                                              String className,
+                                              List<String> args) {
+    String javaHome = System.getProperty("java.home");
+    String javaBin = String.format("%1$s%2$sbin%2$sjava",
+        javaHome, File.separator);
+    List<String> command = Lists.newArrayList();
+    command.add(javaBin);
+    command.addAll(jvmArgs);
+    command.add("-cp");
+    command.add(classpath);
+    command.add(className);
+    command.addAll(args);
+    return new ProcessBuilder(command);
+  }
 }
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/MessageQueue.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/MessageQueue.java
new file mode 100644
index 000000000000..5f2292141e45
--- /dev/null
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/MessageQueue.java
@@ -0,0 +1,188 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.hdds.utils;
+
+import org.apache.commons.io.input.buffer.CircularByteBuffer;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.concurrent.Callable;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantLock;
+
+/**
+ * Message queue that pipes output from one output stream to an input stream.
+ */
+public class MessageQueue {
+  private CircularByteBuffer byteBuffer;
+  private AtomicBoolean isRunning;
+
+  private MessageOutputStream messageOutputStream;
+  private MessageInputStream messageInputStream;
+
+  public MessageQueue(int bufferSize, long pollIntervalMillis) {
+    this.pollIntervalMillis = pollIntervalMillis;
+    init(bufferSize);
+  }
+
+  private void init(int bufferSize) {
+    this.byteBuffer = new CircularByteBuffer(bufferSize);
+    this.isRunning = new AtomicBoolean(false);
+    this.messageInputStream = new MessageInputStream(this);
+    this.messageOutputStream = new MessageOutputStream(this);
+  }
+
+  public void start() {
+    this.isRunning.set(true);
+  }
+
+  public void stop() {
+    this.isRunning.set(false);
+  }
+
+  public MessageOutputStream getMessageOutputStream() {
+    return messageOutputStream;
+  }
+
+  public MessageInputStream getMessageInputStream() {
+    return messageInputStream;
+  }
+
+  private long pollIntervalMillis;
+
+  public boolean isRunning() {
+    return isRunning.get();
+  }
+
+  private long getPollIntervalMillis() {
+    return pollIntervalMillis;
+  }
+
+  private boolean hasSpace(int requiredLength) {
+    return this.byteBuffer.hasSpace(requiredLength);
+  }
+
+  private boolean hasBytes() {
+    return this.byteBuffer.hasBytes();
+  }
+
+  private int getCurrentNumberOfBytes() {
+    return this.byteBuffer.getCurrentNumberOfBytes();
+  }
+
+  private void add(byte[] b, int off, int len) {
+    this.byteBuffer.add(b, off, len);
+  }
+
+  private int read(byte[] b, int off, int len) {
+    this.byteBuffer.read(b, off, len);
+    return len;
+  }
+
+  private static <T> T callWithLock(Lock lock, Callable<T> callable)
+      throws Exception {
+    lock.lock();
+    try {
+      return callable.call();
+    } finally {
+      lock.unlock();
+    }
+  }
+  private static final class MessageOutputStream extends OutputStream {
+
+    private MessageQueue messageQueue;
+    private Lock writeLock;
+
+    private MessageOutputStream(MessageQueue messageQueue) {
+      this.messageQueue = messageQueue;
+      this.writeLock = new ReentrantLock();
+
+    }
+
+    private void waitForBytes(int requiredLength) throws InterruptedException {
+      while (!this.messageQueue.hasSpace(requiredLength)) {
+        Thread.sleep(this.messageQueue.getPollIntervalMillis());
+      }
+    }
+
+
+
+    @Override
+    public void write(int b) throws IOException {
+      this.write(new byte[]{(byte) b});
+    }
+
+    @Override
+    public void write(byte[] b, int off, int len) throws IOException {
+      try {
+        if (!this.messageQueue.isRunning()) {
+          throw new IOException("Message Queue is Closed");
+        }
+        waitForBytes(len);
+        callWithLock(this.writeLock, () -> {
+          waitForBytes(len);
+          this.messageQueue.add(b, off, len);
+          return true;
+        });
+      } catch (Exception e) {
+        throw new IOException(e);
+      }
+    }
+  }
+  private static final class
MessageInputStream extends InputStream { + + private MessageQueue messageQueue; + private Lock readLock; + + private MessageInputStream(MessageQueue messageQueue) { + this.messageQueue = messageQueue; + this.readLock = new ReentrantLock(); + } + + private void waitForBytes() throws InterruptedException { + while (!this.messageQueue.hasBytes() && this.messageQueue.isRunning()) { + Thread.sleep(messageQueue.getPollIntervalMillis()); + } + } + + @Override + public int read() throws IOException { + byte[] readByte = new byte[1]; + int numberOfBytesRead = this.read(readByte); + return numberOfBytesRead == -1 ? -1 : (readByte[0] & 0xff); + } + + @Override + public int read(byte[] b, int off, int len) throws IOException { + try { + return MessageQueue.callWithLock(this.readLock, () -> { + waitForBytes(); + if (!this.messageQueue.isRunning()) { + return -1; + } + return this.messageQueue.read(b, off, Math.min(len, + this.messageQueue.getCurrentNumberOfBytes())); + }); + } catch (Exception e) { + throw new IOException(e); + } + } + } +} diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java new file mode 100644 index 000000000000..073f359399ad --- /dev/null +++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java @@ -0,0 +1,193 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.hdds.utils.db.managed;
+
+import com.google.common.collect.Lists;
+import org.apache.hadoop.hdds.utils.HddsServerUtil;
+import org.eclipse.jetty.io.RuntimeIOException;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * Iterator to parse the output of the RocksDB SSTDumpTool.
+ */
+public class ManagedSSTDumpIterator implements
+    Iterator<KeyValue>, AutoCloseable {
+  private Process process;
+  private static final String SST_DUMP_TOOL_CLASS =
+      "org.apache.hadoop.hdds.utils.db.managed.ManagedSSTDumpTool";
+  private static final String PATTERN_REGEX =
+      "'([^=>]+)' seq:([0-9]+), type:([0-9]+) =>";
+
+  public static final int PATTERN_KEY_GROUP_NUMBER = 1;
+  public static final int PATTERN_SEQ_GROUP_NUMBER = 2;
+  public static final int PATTERN_TYPE_GROUP_NUMBER = 3;
+  private static final Pattern PATTERN_MATCHER =
+      Pattern.compile(PATTERN_REGEX);
+  private BufferedReader processOutput;
+  private StringBuilder stdoutString;
+
+  private Matcher currentMatcher;
+  private int prevMatchEndIndex;
+  private KeyValue currentKey;
+  private char[] charBuffer;
+  private KeyValue nextKey;
+
+  private long pollIntervalMillis;
+
+
+  public ManagedSSTDumpIterator(String sstDumptoolJarPath,
+                                String sstFilePath,
+                                long pollIntervalMillis) throws IOException {
+    File sstFile = new File(sstFilePath);
+    if (!sstFile.exists() || !sstFile.isFile()) {
+      throw new IOException(String.format("Invalid SST File Path : %s",
+          sstFile.getAbsolutePath()));
+    }
+    this.pollIntervalMillis = pollIntervalMillis;
+    init(sstFile, sstDumptoolJarPath);
+  }
+
+  private void init(File sstFile, String sstDumptoolJarPath)
+      throws IOException {
+    List<String> args = Lists.newArrayList(
+        "--file=" + sstFile.getAbsolutePath(),
+        "--command=scan");
+    process = HddsServerUtil.getJavaProcess(Collections.emptyList(),
+        sstDumptoolJarPath, SST_DUMP_TOOL_CLASS, args).start();
+    processOutput = new BufferedReader(new InputStreamReader(
+        process.getInputStream()));
+    stdoutString = new StringBuilder();
+    currentMatcher = PATTERN_MATCHER.matcher(stdoutString);
+    charBuffer = new char[8192];
+    next();
+  }
+
+  private void checkSanityOfProcess() {
+    if (!process.isAlive() && process.exitValue() != 0) {
+      throw new RuntimeException("Process Terminated with non zero " +
+          String.format("exit value %d", process.exitValue()));
+    }
+  }
+
+  @Override
+  public boolean hasNext() {
+    checkSanityOfProcess();
+    return nextKey != null;
+  }
+
+  @Override
+  public synchronized KeyValue next() throws RuntimeIOException {
+    checkSanityOfProcess();
+    currentKey = nextKey;
+    nextKey = null;
+    while (!currentMatcher.find()) {
+      try {
+        if (prevMatchEndIndex != 0) {
+          stdoutString = new StringBuilder(stdoutString.substring(
+              prevMatchEndIndex, stdoutString.length()));
+          prevMatchEndIndex = 0;
+          currentMatcher = PATTERN_MATCHER.matcher(stdoutString);
+        }
+        Thread.sleep(pollIntervalMillis);
+        int numberOfCharsRead = processOutput.read(charBuffer);
+        if (numberOfCharsRead < 0) {
+          if (currentKey != null) {
+
currentKey.setValue(stdoutString.toString()); + } + return currentKey; + } + stdoutString.append(charBuffer, 0, numberOfCharsRead); + currentMatcher.reset(); + } catch (IOException | InterruptedException e) { + throw new RuntimeIOException(e); + } + } + if (currentKey != null) { + currentKey.setValue(stdoutString.substring(prevMatchEndIndex, + currentMatcher.start())); + } + prevMatchEndIndex = currentMatcher.end(); + nextKey = new KeyValue(currentMatcher.group(1), currentMatcher.group(2), + currentMatcher.group(3)); + return currentKey; + } + + @Override + public synchronized void close() throws Exception { + if (this.process != null) { + this.process.destroyForcibly(); + this.processOutput.close(); + } + } + + /** + * Class containing Parsed KeyValue Record from Sst Dumptool output. + */ + public static final class KeyValue { + private String key; + private Integer sequence; + private Integer type; + + private String value; + + private KeyValue(String key, String sequence, String type) { + this.key = key; + this.sequence = Integer.valueOf(sequence); + this.type = Integer.valueOf(type); + } + + private void setValue(String value) { + this.value = value; + } + + public String getKey() { + return key; + } + + public Integer getSequence() { + return sequence; + } + + public Integer getType() { + return type; + } + + public String getValue() { + return value; + } + + @Override + public String toString() { + return "KeyValue{" + + "key='" + key + '\'' + + ", sequence=" + sequence + + ", type=" + type + + ", value='" + value + '\'' + + '}'; + } + } +} diff --git a/hadoop-hdds/rocks-native/pom.xml b/hadoop-hdds/rocks-native/pom.xml index 455f1e00a4b9..8f37c21b03b8 100644 --- a/hadoop-hdds/rocks-native/pom.xml +++ b/hadoop-hdds/rocks-native/pom.xml @@ -25,10 +25,6 @@ hdds-rocks-native - - org.apache.ozone - hdds-server-framework - 8 diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpTool.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpTool.java index 2eb1d7352280..771c64c720ee 100644 --- a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpTool.java +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpTool.java @@ -21,7 +21,7 @@ import org.apache.hadoop.hdds.utils.NativeLibraryLoader; import org.apache.hadoop.hdds.utils.NativeLibraryNotLoadedException; -import java.util.HashMap; +import java.io.FileNotFoundException; import java.util.Map; import static org.apache.hadoop.hdds.utils.NativeConstants.ROCKS_TOOLS_NATIVE_LIBRARY_NAME; @@ -45,22 +45,20 @@ public ManagedSSTDumpTool() throws NativeLibraryNotLoadedException { } - public void run(String[] args, ManagedOptions options) { - this.runInternal(args, options.getNativeHandle()); + public void run(String[] args) { + this.runInternal(args); } - public void run(Map args, ManagedOptions options) { + public void run(Map args) { this.run(args.entrySet().stream().map(e -> "--" + (e.getValue() == null || e.getValue().isEmpty() ? 
e.getKey() : - e.getKey() + "=" + e.getValue())).toArray(String[]::new), options); + e.getKey() + "=" + e.getValue())).toArray(String[]::new)); } - private native void runInternal(String[] args, long optionsNativeHandle); + private native void runInternal(String[] args); - public static void main(String[] args) throws NativeLibraryNotLoadedException { - Map commandOpts = new HashMap<>(); - commandOpts.put("file","/Users/sbalachandran/Documents/code/dummyrocks/rocks"); - commandOpts.put("command","scan"); - new ManagedSSTDumpTool().run(commandOpts, new ManagedOptions()); + public static void main(String[] args) + throws NativeLibraryNotLoadedException, FileNotFoundException { + new ManagedSSTDumpTool().run(args); } } diff --git a/hadoop-hdds/rocks-native/src/main/native/SSTDumpTool.cpp b/hadoop-hdds/rocks-native/src/main/native/SSTDumpTool.cpp index e9ac71bbd31f..a2634ee06f80 100644 --- a/hadoop-hdds/rocks-native/src/main/native/SSTDumpTool.cpp +++ b/hadoop-hdds/rocks-native/src/main/native/SSTDumpTool.cpp @@ -21,9 +21,7 @@ #include "rocksdb/sst_dump_tool.h" #include "string" -void Java_org_apache_hadoop_hdds_utils_db_managed_ManagedSSTDumpTool_runInternal(JNIEnv *env, jobject obj, jobjectArray argsArray, - jlong options_native_handle) { - auto* options = reinterpret_cast(options_native_handle); +void Java_org_apache_hadoop_hdds_utils_db_managed_ManagedSSTDumpTool_runInternal(JNIEnv *env, jobject obj, jobjectArray argsArray) { ROCKSDB_NAMESPACE::SSTDumpTool dumpTool; int length = env->GetArrayLength(argsArray); const char* args[length + 1]; @@ -33,5 +31,5 @@ void Java_org_apache_hadoop_hdds_utils_db_managed_ManagedSSTDumpTool_runInternal args[i+1] = (char*)env->GetStringUTFChars((jstring)env-> GetObjectArrayElement(argsArray, (jsize)i), JNI_FALSE); } - dumpTool.Run(length + 1, args, *options); + dumpTool.Run(length + 1, args); } \ No newline at end of file From 1c68ddc9c3ae3b3e88d7679f599aca363484da92 Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Wed, 1 Mar 2023 11:03:16 -0800 Subject: [PATCH 06/51] HDDS-8028: Fix dependency lib build --- .../db/managed/ManagedSSTDumpIterator.java | 70 +++++++++++-------- hadoop-hdds/rocks-native/pom.xml | 12 ++-- hadoop-hdds/rocks-native/src/CMakeLists.txt | 3 + .../hdds/utils/NativeLibraryLoader.java | 14 ++-- 4 files changed, 56 insertions(+), 43 deletions(-) diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java index 073f359399ad..b36ca9c94443 100644 --- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java +++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java @@ -25,9 +25,12 @@ import java.io.File; import java.io.IOException; import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; import java.util.Collections; import java.util.Iterator; import java.util.List; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -57,6 +60,7 @@ public class ManagedSSTDumpIterator implements private KeyValue nextKey; private long pollIntervalMillis; + private Lock lock; public ManagedSSTDumpIterator(String sstDumptoolJarPath, @@ -68,6 +72,7 @@ public ManagedSSTDumpIterator(String sstDumptoolJarPath, sstFile.getAbsolutePath())); } this.pollIntervalMillis = 
pollIntervalMillis; + this.lock = new ReentrantLock(); init(sstFile, sstDumptoolJarPath); } @@ -79,7 +84,7 @@ private void init(File sstFile, String sstDumptoolJarPath) process = HddsServerUtil.getJavaProcess(Collections.emptyList(), sstDumptoolJarPath, SST_DUMP_TOOL_CLASS, args).start(); processOutput = new BufferedReader(new InputStreamReader( - process.getInputStream())); + process.getInputStream(), StandardCharsets.UTF_8)); stdoutString = new StringBuilder(); currentMatcher = PATTERN_MATCHER.matcher(stdoutString); charBuffer = new char[8192]; @@ -100,40 +105,45 @@ public boolean hasNext() { } @Override - public synchronized KeyValue next() throws RuntimeIOException { + public KeyValue next() throws RuntimeIOException { checkSanityOfProcess(); - currentKey = nextKey; - nextKey = null; - while (!currentMatcher.find()) { - try { - if (prevMatchEndIndex != 0) { - stdoutString = new StringBuilder(stdoutString.substring( - prevMatchEndIndex, stdoutString.length())); - prevMatchEndIndex = 0; - currentMatcher = PATTERN_MATCHER.matcher(stdoutString); - } - Thread.sleep(pollIntervalMillis); - int numberOfCharsRead = processOutput.read(charBuffer); - if (numberOfCharsRead < 0) { - if (currentKey != null) { - currentKey.setValue(stdoutString.toString()); + try { + lock.lock(); + currentKey = nextKey; + nextKey = null; + while (!currentMatcher.find()) { + try { + if (prevMatchEndIndex != 0) { + stdoutString = new StringBuilder(stdoutString.substring( + prevMatchEndIndex, stdoutString.length())); + prevMatchEndIndex = 0; + currentMatcher = PATTERN_MATCHER.matcher(stdoutString); + } + Thread.sleep(pollIntervalMillis); + int numberOfCharsRead = processOutput.read(charBuffer); + if (numberOfCharsRead < 0) { + if (currentKey != null) { + currentKey.setValue(stdoutString.toString()); + } + return currentKey; } - return currentKey; + stdoutString.append(charBuffer, 0, numberOfCharsRead); + currentMatcher.reset(); + } catch (IOException | InterruptedException e) { + throw new RuntimeIOException(e); } - stdoutString.append(charBuffer, 0, numberOfCharsRead); - currentMatcher.reset(); - } catch (IOException | InterruptedException e) { - throw new RuntimeIOException(e); } + if (currentKey != null) { + currentKey.setValue(stdoutString.substring(prevMatchEndIndex, + currentMatcher.start())); + } + prevMatchEndIndex = currentMatcher.end(); + nextKey = new KeyValue(currentMatcher.group(1), currentMatcher.group(2), + currentMatcher.group(3)); + return currentKey; + } finally { + lock.unlock(); } - if (currentKey != null) { - currentKey.setValue(stdoutString.substring(prevMatchEndIndex, - currentMatcher.start())); - } - prevMatchEndIndex = currentMatcher.end(); - nextKey = new KeyValue(currentMatcher.group(1), currentMatcher.group(2), - currentMatcher.group(3)); - return currentKey; } @Override diff --git a/hadoop-hdds/rocks-native/pom.xml b/hadoop-hdds/rocks-native/pom.xml index 8f37c21b03b8..03233a4f854d 100644 --- a/hadoop-hdds/rocks-native/pom.xml +++ b/hadoop-hdds/rocks-native/pom.xml @@ -146,9 +146,7 @@ - - - + @@ -161,7 +159,7 @@ generate-sources - + @@ -173,7 +171,7 @@ generate-sources - + @@ -185,7 +183,7 @@ generate-sources - + @@ -203,7 +201,7 @@ - + diff --git a/hadoop-hdds/rocks-native/src/CMakeLists.txt b/hadoop-hdds/rocks-native/src/CMakeLists.txt index a49a49735fd5..a08973cedde0 100644 --- a/hadoop-hdds/rocks-native/src/CMakeLists.txt +++ b/hadoop-hdds/rocks-native/src/CMakeLists.txt @@ -21,11 +21,14 @@ # cmake_minimum_required(VERSION 3.1 FATAL_ERROR) +set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fPIC") 
+set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fPIC") project(ozone_native) set(CMAKE_BUILD_TYPE Release) find_package(JNI REQUIRED) include_directories(${JNI_INCLUDE_DIRS}) set(CMAKE_CXX_STANDARD ${CMAKE_STANDARDS}) + set(linked_libraries "") if(NOT GENERATED_JAVAH) message(FATAL_ERROR "You must set the CMake variable GENERATED_JAVAH") diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/NativeLibraryLoader.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/NativeLibraryLoader.java index 0f2ce9b0fee7..c3a53f8f71a9 100644 --- a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/NativeLibraryLoader.java +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/NativeLibraryLoader.java @@ -34,19 +34,21 @@ public class NativeLibraryLoader { private static final String OS = System.getProperty("os.name").toLowerCase(); private Map librariesLoaded; - private static NativeLibraryLoader instance; + private static volatile NativeLibraryLoader instance; public NativeLibraryLoader(final Map librariesLoaded) { this.librariesLoaded = librariesLoaded; } + private synchronized static void initNewInstance() { + if (instance == null) { + instance = new NativeLibraryLoader(new ConcurrentHashMap<>()); + } + } + public static NativeLibraryLoader getInstance() { if (instance == null) { - synchronized (NativeLibraryLoader.class) { - if (instance == null) { - instance = new NativeLibraryLoader(new ConcurrentHashMap<>()); - } - } + initNewInstance(); } return instance; } From 189cf30c15dd0d7e07d6d2a89c2c122f55bb8581 Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Wed, 1 Mar 2023 11:08:26 -0800 Subject: [PATCH 07/51] HDDS-8028: Fix checkstyle issue --- .../java/org/apache/hadoop/hdds/utils/NativeLibraryLoader.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/NativeLibraryLoader.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/NativeLibraryLoader.java index c3a53f8f71a9..322c54954ec9 100644 --- a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/NativeLibraryLoader.java +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/NativeLibraryLoader.java @@ -40,7 +40,7 @@ public NativeLibraryLoader(final Map librariesLoaded) { this.librariesLoaded = librariesLoaded; } - private synchronized static void initNewInstance() { + private static synchronized void initNewInstance() { if (instance == null) { instance = new NativeLibraryLoader(new ConcurrentHashMap<>()); } From e8af6a02842eba2fb31eab6f641eae4616e9dd3a Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Wed, 1 Mar 2023 11:20:07 -0800 Subject: [PATCH 08/51] HDDS-8028: Fix build script --- hadoop-hdds/rocks-native/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hadoop-hdds/rocks-native/pom.xml b/hadoop-hdds/rocks-native/pom.xml index 03233a4f854d..997d71d09a59 100644 --- a/hadoop-hdds/rocks-native/pom.xml +++ b/hadoop-hdds/rocks-native/pom.xml @@ -146,7 +146,7 @@ - + From 7d28b6534500e75d5f4e9ae5287c51f763c16ecf Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Wed, 1 Mar 2023 12:08:54 -0800 Subject: [PATCH 09/51] HDDS-8028: Fix build script --- hadoop-hdds/rocks-native/pom.xml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/hadoop-hdds/rocks-native/pom.xml b/hadoop-hdds/rocks-native/pom.xml index 997d71d09a59..7607d056ae65 100644 --- 
a/hadoop-hdds/rocks-native/pom.xml +++ b/hadoop-hdds/rocks-native/pom.xml @@ -146,7 +146,9 @@ - + + + From 1a9ba91efdc267f61b4c71387097520e2985e877 Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Wed, 1 Mar 2023 12:15:02 -0800 Subject: [PATCH 10/51] HDDS-8028: Fix build script --- hadoop-hdds/rocks-native/pom.xml | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/hadoop-hdds/rocks-native/pom.xml b/hadoop-hdds/rocks-native/pom.xml index 7607d056ae65..6030f8f03abb 100644 --- a/hadoop-hdds/rocks-native/pom.xml +++ b/hadoop-hdds/rocks-native/pom.xml @@ -146,7 +146,8 @@ - + + @@ -161,7 +162,9 @@ generate-sources - + + + @@ -173,7 +176,9 @@ generate-sources - + + + @@ -185,7 +190,9 @@ generate-sources - + + + From 9f4e4fd1e0f407d2953616421cbb2c02ac697de8 Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Wed, 1 Mar 2023 13:07:36 -0800 Subject: [PATCH 11/51] HDDS-8028: Fix build script --- hadoop-hdds/rocks-native/pom.xml | 27 +++++++++++++++++---------- 1 file changed, 17 insertions(+), 10 deletions(-) diff --git a/hadoop-hdds/rocks-native/pom.xml b/hadoop-hdds/rocks-native/pom.xml index 6030f8f03abb..b26fc9cbeea1 100644 --- a/hadoop-hdds/rocks-native/pom.xml +++ b/hadoop-hdds/rocks-native/pom.xml @@ -199,6 +199,23 @@ run + + build-snappy + generate-sources + + + + + + + + + + + + run + + build-rocksjava generate-resources @@ -246,16 +263,6 @@ org.apache.hadoop hadoop-maven-plugins - - cmake-compile-snappy - generate-sources - cmake-compile - - ${project.build.directory}/snappy/snappy-${snappy.version} - ${project.build.directory}/snappy/lib - - - cmake-compile-rockstools compile From 10bf03363675b22de364a1a01526dcee65c4e5b8 Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Wed, 1 Mar 2023 13:37:30 -0800 Subject: [PATCH 12/51] HDDS-8028: Reduce number of threads --- hadoop-hdds/rocks-native/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hadoop-hdds/rocks-native/pom.xml b/hadoop-hdds/rocks-native/pom.xml index b26fc9cbeea1..c05a33262e89 100644 --- a/hadoop-hdds/rocks-native/pom.xml +++ b/hadoop-hdds/rocks-native/pom.xml @@ -229,7 +229,7 @@ - + > From 485f623ae74d2c4efa71680bcba7bdc92728af06 Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Wed, 1 Mar 2023 14:16:49 -0800 Subject: [PATCH 13/51] HDDS-8028: Add cxx flag --- hadoop-hdds/rocks-native/pom.xml | 61 ++++++++++++++++---------------- 1 file changed, 31 insertions(+), 30 deletions(-) diff --git a/hadoop-hdds/rocks-native/pom.xml b/hadoop-hdds/rocks-native/pom.xml index c05a33262e89..29aa9ea2dbb8 100644 --- a/hadoop-hdds/rocks-native/pom.xml +++ b/hadoop-hdds/rocks-native/pom.xml @@ -231,13 +231,42 @@ - > + + + + + run + + + + build-rocks-tools + generate-sources + + + + + + + + + + + + + + + + + + + run + @@ -245,7 +274,7 @@ native-maven-plugin - compile + generate-resources javah @@ -259,34 +288,6 @@ - - org.apache.hadoop - hadoop-maven-plugins - - - cmake-compile-rockstools - compile - cmake-compile - - ${basedir}/src - ${project.build.directory}/native/rocksdb - - ${project.build.directory}/native/javah - ${basedir}/src/main/native - ${sstDump.include} - ${cmake.standards} - ${project.build.directory}/rocksdb/rocksdb-${rocksdb.version}/include - ${project.build.directory}/rocksdb/rocksdb-${rocksdb.version} - ${project.build.directory}/zlib/zlib-${zlib.version} - ${project.build.directory}/bzip2/bzip2-${bzip2.version} - ${project.build.directory}/lz4/lz4-${lz4.version}/lib - ${project.build.directory}/snappy/lib - 
${project.build.directory}/zstd/zstd-${zstd.version}/lib - - - - - maven-antrun-plugin From b6a9e4cfb335a4a7712d7c2c46a61bee59ba6af5 Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Wed, 1 Mar 2023 14:17:44 -0800 Subject: [PATCH 14/51] HDDS-8028: Add cxx flag --- hadoop-hdds/rocks-native/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hadoop-hdds/rocks-native/pom.xml b/hadoop-hdds/rocks-native/pom.xml index 29aa9ea2dbb8..0f3ac85fd222 100644 --- a/hadoop-hdds/rocks-native/pom.xml +++ b/hadoop-hdds/rocks-native/pom.xml @@ -259,7 +259,7 @@ - + From 873a0315f4156c3606d3e2e218b4dc8d1173da09 Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Wed, 1 Mar 2023 14:23:18 -0800 Subject: [PATCH 15/51] HDDS-8028: fix phase --- hadoop-hdds/rocks-native/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hadoop-hdds/rocks-native/pom.xml b/hadoop-hdds/rocks-native/pom.xml index 0f3ac85fd222..e3da6983183b 100644 --- a/hadoop-hdds/rocks-native/pom.xml +++ b/hadoop-hdds/rocks-native/pom.xml @@ -240,7 +240,7 @@ build-rocks-tools - generate-sources + compile From 584b4d761bfd82f9d4e41c3c2f02ef5f9f4bc32f Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Wed, 1 Mar 2023 14:44:22 -0800 Subject: [PATCH 16/51] HDDS-8028: fix javah --- hadoop-hdds/rocks-native/pom.xml | 38 ++++++++++++++++---------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/hadoop-hdds/rocks-native/pom.xml b/hadoop-hdds/rocks-native/pom.xml index e3da6983183b..b4126abb5c7b 100644 --- a/hadoop-hdds/rocks-native/pom.xml +++ b/hadoop-hdds/rocks-native/pom.xml @@ -115,6 +115,25 @@ + + org.codehaus.mojo + native-maven-plugin + + + compile + + javah + + + ${env.JAVA_HOME}/bin/javah + + org.apache.hadoop.hdds.utils.db.managed.ManagedSSTDumpTool + + ${project.build.directory}/native/javah + + + + maven-antrun-plugin @@ -269,25 +288,6 @@ - - org.codehaus.mojo - native-maven-plugin - - - generate-resources - - javah - - - ${env.JAVA_HOME}/bin/javah - - org.apache.hadoop.hdds.utils.db.managed.ManagedSSTDumpTool - - ${project.build.directory}/native/javah - - - - maven-antrun-plugin From 004d26ed18f15280d48a3ea09632a336c4d9524f Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Wed, 1 Mar 2023 15:07:54 -0800 Subject: [PATCH 17/51] HDDS-8028: fix rockstool compile --- hadoop-hdds/rocks-native/pom.xml | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/hadoop-hdds/rocks-native/pom.xml b/hadoop-hdds/rocks-native/pom.xml index b4126abb5c7b..658d014bc600 100644 --- a/hadoop-hdds/rocks-native/pom.xml +++ b/hadoop-hdds/rocks-native/pom.xml @@ -264,19 +264,19 @@ - - - - - - - - - - - - + + + + + + + + + + + + From db9e3a44dbdac7f46597d8c2ea799a669399e8c2 Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Wed, 1 Mar 2023 15:44:49 -0800 Subject: [PATCH 18/51] HDDS-8028: fix rockstool compile --- hadoop-hdds/rocks-native/pom.xml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/hadoop-hdds/rocks-native/pom.xml b/hadoop-hdds/rocks-native/pom.xml index 658d014bc600..144970a10716 100644 --- a/hadoop-hdds/rocks-native/pom.xml +++ b/hadoop-hdds/rocks-native/pom.xml @@ -227,6 +227,7 @@ + @@ -265,6 +266,7 @@ + From ef8f58de0d32fdc694b01c9817ff4fc4802844e4 Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Thu, 2 Mar 2023 05:45:58 -0800 Subject: [PATCH 19/51] HDDS-8028: Increase github ci timeout --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f540c1df77a3..d894e7c85cad 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -61,7 +61,7 @@ jobs: needs: - build-info runs-on: ubuntu-20.04 - timeout-minutes: 30 + timeout-minutes: 60 if: needs.build-info.outputs.needs-build == 'true' strategy: matrix: From 6ee898430b926568c6979e785ac9a60c670fb84f Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Thu, 2 Mar 2023 09:23:11 -0800 Subject: [PATCH 20/51] HDDS-8028: Fix Java JNI Compilation for java 11 --- hadoop-hdds/rocks-native/pom.xml | 114 ++++++++++++++++++++++--------- 1 file changed, 81 insertions(+), 33 deletions(-) diff --git a/hadoop-hdds/rocks-native/pom.xml b/hadoop-hdds/rocks-native/pom.xml index 144970a10716..ba2b3a6a4464 100644 --- a/hadoop-hdds/rocks-native/pom.xml +++ b/hadoop-hdds/rocks-native/pom.xml @@ -32,8 +32,6 @@ 23 true - - @@ -115,25 +113,7 @@ - - org.codehaus.mojo - native-maven-plugin - - - compile - - javah - - - ${env.JAVA_HOME}/bin/javah - - org.apache.hadoop.hdds.utils.db.managed.ManagedSSTDumpTool - - ${project.build.directory}/native/javah - - - - + maven-antrun-plugin @@ -154,11 +134,6 @@ run - - - - maven-antrun-plugin - build-zlib generate-sources @@ -260,7 +235,7 @@ build-rocks-tools - compile + process-classes @@ -287,12 +262,6 @@ run - - - - - maven-antrun-plugin - copy-lib-file prepare-package @@ -326,6 +295,85 @@ + + + java-8 + + 1.8 + + + + + org.codehaus.mojo + native-maven-plugin + + + compile + + javah + + + ${env.JAVA_HOME}/bin/javah + + org.apache.hadoop.hdds.utils.db.managed.ManagedSSTDumpTool + + ${project.build.directory}/native/javah + + + + + + + + + java-11 + + + javac.version + 11 + + + + + + org.codehaus.mojo + native-maven-plugin + + + javach + + exec + + compile + + ${env.JAVA_HOME}/bin/javac + + -classpath + ${project.build.outputDirectory} + -h + ${project.build.directory}/native/javah + + + + + compile + + javah + + + ${env.JAVA_HOME}/bin/javah + + org.apache.hadoop.hdds.utils.db.managed.ManagedSSTDumpTool + + ${project.build.directory}/native/javah + + + + + + + + \ No newline at end of file From 3974a6917fb83cfb6ca0add40fc8cb33c5d0d954 Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Thu, 2 Mar 2023 11:07:22 -0800 Subject: [PATCH 21/51] HDDS-8028: Fix Java JNI Compilation for java 11 --- hadoop-hdds/rocks-native/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hadoop-hdds/rocks-native/pom.xml b/hadoop-hdds/rocks-native/pom.xml index ba2b3a6a4464..c0c944c1dcd2 100644 --- a/hadoop-hdds/rocks-native/pom.xml +++ b/hadoop-hdds/rocks-native/pom.xml @@ -337,7 +337,7 @@ org.codehaus.mojo - native-maven-plugin + exec-maven-plugin javach From 246820acd115483edd5d2fd5450a5763aa7cefa5 Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Thu, 2 Mar 2023 11:54:40 -0800 Subject: [PATCH 22/51] HDDS-8028: Fix Java JNI Compilation for java 11 --- hadoop-hdds/rocks-native/pom.xml | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/hadoop-hdds/rocks-native/pom.xml b/hadoop-hdds/rocks-native/pom.xml index c0c944c1dcd2..354083e5a678 100644 --- a/hadoop-hdds/rocks-native/pom.xml +++ b/hadoop-hdds/rocks-native/pom.xml @@ -355,19 +355,6 @@ - - compile - - javah - - - ${env.JAVA_HOME}/bin/javah - - org.apache.hadoop.hdds.utils.db.managed.ManagedSSTDumpTool - - ${project.build.directory}/native/javah - - From f192cba541614c9828c035eddc72b2b0480fca4b Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Thu, 2 Mar 2023 13:31:53 -0800 Subject: [PATCH 
23/51] HDDS-8028: Fix Java JNI Compilation for java 11 --- hadoop-hdds/rocks-native/pom.xml | 1 + 1 file changed, 1 insertion(+) diff --git a/hadoop-hdds/rocks-native/pom.xml b/hadoop-hdds/rocks-native/pom.xml index 354083e5a678..40604acdad92 100644 --- a/hadoop-hdds/rocks-native/pom.xml +++ b/hadoop-hdds/rocks-native/pom.xml @@ -352,6 +352,7 @@ ${project.build.outputDirectory} -h ${project.build.directory}/native/javah + ${project.basedir}/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpTool.java From b4e2735a10ca53a817ce7d9d6db7bfd0a7f7b600 Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Fri, 3 Mar 2023 18:39:40 -0800 Subject: [PATCH 24/51] HDDS-8028: Add Jni for creating pipe between sst dump output & iterator --- .../hadoop/hdds/utils/HddsServerUtil.java | 25 --- .../hadoop/hdds/utils/MessageQueue.java | 188 ------------------ .../utils/db/managed/ManagedRocksObject.java | 35 ---- hadoop-hdds/rocks-native/pom.xml | 40 +++- hadoop-hdds/rocks-native/src/CMakeLists.txt | 2 +- .../db/managed/ManagedSSTDumpIterator.java | 95 +++++++-- .../utils/db/managed/ManagedSSTDumpTool.java | 85 +++++++- .../utils/db/managed/PipeInputStream.java | 101 ++++++++++ .../rocks-native/src/main/native/Pipe.cpp | 20 ++ .../rocks-native/src/main/native/Pipe.h | 37 ++++ .../src/main/native/PipeInputStream.cpp | 34 ++++ .../src/main/native/SSTDumpTool.cpp | 28 ++- .../main/native/cplusplus_to_java_convert.h | 37 ++++ 13 files changed, 432 insertions(+), 295 deletions(-) delete mode 100644 hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/MessageQueue.java delete mode 100644 hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedRocksObject.java rename hadoop-hdds/{framework => rocks-native}/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java (68%) create mode 100644 hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/PipeInputStream.java create mode 100644 hadoop-hdds/rocks-native/src/main/native/Pipe.cpp create mode 100644 hadoop-hdds/rocks-native/src/main/native/Pipe.h create mode 100644 hadoop-hdds/rocks-native/src/main/native/PipeInputStream.cpp create mode 100644 hadoop-hdds/rocks-native/src/main/native/cplusplus_to_java_convert.h diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java index 3e912d887d80..63c954529228 100644 --- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java +++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java @@ -584,29 +584,4 @@ public static IOException toIOException(String msg, RocksDBException e) { public static void addSuppressedLoggingExceptions(RPC.Server server) { server.addSuppressedLoggingExceptions(ServerNotLeaderException.class); } - - /** - * Build javaProcessBuilder. 
- * @param jvmArgs - * @param classpath - * @param className - * @param args - * @return ProcessBuilder - */ - public static ProcessBuilder getJavaProcess(List jvmArgs, - String classpath, - String className, - List args) { - String javaHome = System.getProperty("java.home"); - String javaBin = String.format("%1$s%2$sbin%2$sjava", - javaHome, File.separator); - List command = Lists.newArrayList(); - command.add(javaBin); - command.addAll(jvmArgs); - command.add("-cp"); - command.add(classpath); - command.add(className); - command.addAll(args); - return new ProcessBuilder(command); - } } diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/MessageQueue.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/MessageQueue.java deleted file mode 100644 index 5f2292141e45..000000000000 --- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/MessageQueue.java +++ /dev/null @@ -1,188 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with this - * work for additional information regarding copyright ownership. The ASF - * licenses this file to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations under - * the License. - */ - -package org.apache.hadoop.hdds.utils; - -import org.apache.commons.io.input.buffer.CircularByteBuffer; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.util.concurrent.Callable; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.locks.Lock; -import java.util.concurrent.locks.ReentrantLock; - -/** - * Message Queue to Pipe output from one output stream to another inputstream. - */ -public class MessageQueue { - private CircularByteBuffer byteBuffer; - private AtomicBoolean isRunning; - - private MessageOutputStream messageOutputStream; - private MessageInputStream messageInputStream; - - public MessageQueue(int bufferSize, long pollIntervalMillis) { - this.pollIntervalMillis = pollIntervalMillis; - init(bufferSize); - } - - private void init(int bufferSize) { - this.byteBuffer = new CircularByteBuffer(bufferSize); - this.isRunning = new AtomicBoolean(false); - this.messageInputStream = new MessageInputStream(this); - this.messageOutputStream = new MessageOutputStream(this); - } - - public void start() { - this.isRunning.set(true); - } - - public void stop() { - this.isRunning.set(false); - } - - public MessageOutputStream getMessageOutputStream() { - return messageOutputStream; - } - - public MessageInputStream getMessageInputStream() { - return messageInputStream; - } - - private long pollIntervalMillis; - - public boolean isRunning() { - return isRunning.get(); - } - - private long getPollIntervalMillis() { - return pollIntervalMillis; - } - - private boolean hasSpace(int requiredLength) { - return this.byteBuffer.hasSpace(requiredLength); - } - - private boolean hasBytes() { - return this.byteBuffer.hasBytes(); - } - - private int getCurrentNumberOfBytes() { - return this.byteBuffer.getCurrentNumberOfBytes(); - } - - private void add(byte[] b, int off, int len) { - this.byteBuffer.add(b, off, len); - } - - private int read(byte[] b, int off, int len) { - this.byteBuffer.read(b, off, len); - return len; - } - - private static T callWithLock(Lock lock, Callable callable) - throws Exception { - lock.lock(); - try { - return callable.call(); - } finally { - lock.unlock(); - } - } - private static final class MessageOutputStream extends OutputStream { - - private MessageQueue messageQueue; - private Lock writeLock; - - private MessageOutputStream(MessageQueue messageQueue) { - this.messageQueue = messageQueue; - this.writeLock = new ReentrantLock(); - - } - - private void waitForBytes(int requiredLength) throws InterruptedException { - while (!this.messageQueue.hasSpace(requiredLength)) { - Thread.sleep(this.messageQueue.getPollIntervalMillis()); - } - } - - - - @Override - public void write(int b) throws IOException { - this.write(new byte[]{(byte) b}); - } - - @Override - public void write(byte[] b, int off, int len) throws IOException { - try { - if (!this.messageQueue.isRunning()) { - throw new IOException("Message Queue is Closed"); - } - waitForBytes(len); - callWithLock(this.writeLock, () -> { - waitForBytes(len); - this.messageQueue.add(b, off, len); - return true; - }); - } catch (Exception e) { - throw new IOException(e); - } - } - } - private static final class 
MessageInputStream extends InputStream { - - private MessageQueue messageQueue; - private Lock readLock; - - private MessageInputStream(MessageQueue messageQueue) { - this.messageQueue = messageQueue; - this.readLock = new ReentrantLock(); - } - - private void waitForBytes() throws InterruptedException { - while (!this.messageQueue.hasBytes() && this.messageQueue.isRunning()) { - Thread.sleep(messageQueue.getPollIntervalMillis()); - } - } - - @Override - public int read() throws IOException { - byte[] readByte = new byte[1]; - int numberOfBytesRead = this.read(readByte); - return numberOfBytesRead == -1 ? -1 : (readByte[0] & 0xff); - } - - @Override - public int read(byte[] b, int off, int len) throws IOException { - try { - return MessageQueue.callWithLock(this.readLock, () -> { - waitForBytes(); - if (!this.messageQueue.isRunning()) { - return -1; - } - return this.messageQueue.read(b, off, Math.min(len, - this.messageQueue.getCurrentNumberOfBytes())); - }); - } catch (Exception e) { - throw new IOException(e); - } - } - } -} diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedRocksObject.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedRocksObject.java deleted file mode 100644 index 396949a8643e..000000000000 --- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedRocksObject.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hdds.utils.db.managed; - -import org.rocksdb.RocksObject; -/** - * Managed RocksObject. 
- */ -public abstract class ManagedRocksObject extends RocksObject { - protected ManagedRocksObject(long nativeHandle) { - super(nativeHandle); - } - - @Override - protected void finalize() throws Throwable { - ManagedRocksObjectUtils.assertClosed(this); - super.finalize(); - } -} diff --git a/hadoop-hdds/rocks-native/pom.xml b/hadoop-hdds/rocks-native/pom.xml index 40604acdad92..701de921b576 100644 --- a/hadoop-hdds/rocks-native/pom.xml +++ b/hadoop-hdds/rocks-native/pom.xml @@ -25,6 +25,10 @@ hdds-rocks-native + + org.apache.ozone + hdds-server-framework + 8 @@ -113,7 +117,25 @@ - + + org.apache.maven.plugins + maven-patch-plugin + 1.1.1 + + ${basedir}/src/main/patches/rocks-native.patch + 1 + ${project.build.directory}/rocksdb/rocksdb-${rocksdb.version} + + + + patch + process-sources + + apply + + + + maven-antrun-plugin @@ -136,7 +158,7 @@ build-zlib - generate-sources + process-sources @@ -153,7 +175,7 @@ build-bzip2 - generate-sources + process-sources @@ -167,7 +189,7 @@ build-lz4 - generate-sources + process-sources @@ -181,7 +203,7 @@ build-zstd - generate-sources + process-sources @@ -195,7 +217,7 @@ build-snappy - generate-sources + process-sources @@ -213,7 +235,7 @@ build-rocksjava - generate-resources + process-resources @@ -224,7 +246,7 @@ - + @@ -316,6 +338,7 @@ ${env.JAVA_HOME}/bin/javah org.apache.hadoop.hdds.utils.db.managed.ManagedSSTDumpTool + org.apache.hadoop.hdds.utils.db.managed.PipeInputStream ${project.build.directory}/native/javah @@ -353,6 +376,7 @@ -h ${project.build.directory}/native/javah ${project.basedir}/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpTool.java + ${project.basedir}/src/main/java/org/apache/hadoop/hdds/utils/db/managed/PipeInputStream.java diff --git a/hadoop-hdds/rocks-native/src/CMakeLists.txt b/hadoop-hdds/rocks-native/src/CMakeLists.txt index a08973cedde0..de20ff2513e8 100644 --- a/hadoop-hdds/rocks-native/src/CMakeLists.txt +++ b/hadoop-hdds/rocks-native/src/CMakeLists.txt @@ -36,7 +36,7 @@ endif() include_directories(${GENERATED_JAVAH}) if(${SST_DUMP_INCLUDE}) include_directories(${ROCKSDB_HEADERS}) - set(SOURCE_FILES ${NATIVE_DIR}/SSTDumpTool.cpp) + set(SOURCE_FILES ${NATIVE_DIR}/SSTDumpTool.cpp ${NATIVE_DIR}/PipeInputStream.cpp ${NATIVE_DIR}/Pipe.h ${NATIVE_DIR}/Pipe.cpp ${NATIVE_DIR}/cplusplus_to_java_convert.h) ADD_LIBRARY(rocksdb STATIC IMPORTED) set_target_properties( rocksdb diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java similarity index 68% rename from hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java rename to hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java index b36ca9c94443..dfa62e71b98b 100644 --- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java @@ -19,6 +19,7 @@ import com.google.common.collect.Lists; import org.apache.hadoop.hdds.utils.HddsServerUtil; +import org.apache.hadoop.hdds.utils.NativeLibraryNotLoadedException; import org.eclipse.jetty.io.RuntimeIOException; import java.io.BufferedReader; @@ -29,6 +30,9 @@ import java.util.Collections; import java.util.Iterator; import java.util.List; +import java.util.concurrent.ExecutionException; 
+import java.util.concurrent.ForkJoinPool; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import java.util.regex.Matcher; @@ -39,7 +43,6 @@ */ public class ManagedSSTDumpIterator implements Iterator, AutoCloseable { - private Process process; private static final String SST_DUMP_TOOL_CLASS = "org.apache.hadoop.hdds.utils.db.managed.ManagedSSTDumpTool"; private static final String PATTERN_REGEX = @@ -60,12 +63,16 @@ public class ManagedSSTDumpIterator implements private KeyValue nextKey; private long pollIntervalMillis; + private ManagedSSTDumpTool.SSTDumpToolTask sstDumpToolTask; private Lock lock; + private AtomicBoolean open; - public ManagedSSTDumpIterator(String sstDumptoolJarPath, + public ManagedSSTDumpIterator(ManagedSSTDumpTool sstDumpTool, String sstFilePath, - long pollIntervalMillis) throws IOException { + ManagedOptions options, + long pollIntervalMillis) throws IOException, + NativeLibraryNotLoadedException { File sstFile = new File(sstFilePath); if (!sstFile.exists() || !sstFile.isFile()) { throw new IOException(String.format("Invalid SST File Path : %s", @@ -73,42 +80,61 @@ public ManagedSSTDumpIterator(String sstDumptoolJarPath, } this.pollIntervalMillis = pollIntervalMillis; this.lock = new ReentrantLock(); - init(sstFile, sstDumptoolJarPath); + init(sstDumpTool, sstFile, options); } - private void init(File sstFile, String sstDumptoolJarPath) - throws IOException { - List args = Lists.newArrayList( - "--file=" + sstFile.getAbsolutePath(), - "--command=scan"); - process = HddsServerUtil.getJavaProcess(Collections.emptyList(), - sstDumptoolJarPath, SST_DUMP_TOOL_CLASS, args).start(); + private void init(ManagedSSTDumpTool sstDumpTool, File sstFile, + ManagedOptions options) + throws NativeLibraryNotLoadedException { + String[] args = {"--file=" + sstFile.getAbsolutePath(), + "--command=scan"}; + this.sstDumpToolTask = sstDumpTool.run(args, options); processOutput = new BufferedReader(new InputStreamReader( - process.getInputStream(), StandardCharsets.UTF_8)); + sstDumpToolTask.getPipedOutput(), StandardCharsets.UTF_8)); stdoutString = new StringBuilder(); currentMatcher = PATTERN_MATCHER.matcher(stdoutString); charBuffer = new char[8192]; + open = new AtomicBoolean(true); next(); } + /** + * Throws Runtime exception in the case iterator is closed or + * the native Dumptool exited with non zero exit value. + */ private void checkSanityOfProcess() { - if (!process.isAlive() && process.exitValue() != 0) { + if (!this.open.get()) { + throw new RuntimeException("Iterator has been closed"); + } + if (sstDumpToolTask.getFuture().isDone() + && sstDumpToolTask.exitValue() != 0) { throw new RuntimeException("Process Terminated with non zero " + - String.format("exit value %d", process.exitValue())); + String.format("exit value %d", sstDumpToolTask.exitValue())); } } + /** + * + * @return + * Throws Runtime Exception in case of SST File read failure + */ + @Override public boolean hasNext() { checkSanityOfProcess(); return nextKey != null; } + /** + * + * @return next Key + * Throws Runtime Exception incase of failure. 
+ */ @Override - public KeyValue next() throws RuntimeIOException { - checkSanityOfProcess(); + public KeyValue next() { + lock.lock(); try { - lock.lock(); + checkSanityOfProcess(); currentKey = nextKey; nextKey = null; while (!currentMatcher.find()) { @@ -148,12 +174,25 @@ public KeyValue next() throws RuntimeIOException { @Override public synchronized void close() throws Exception { - if (this.process != null) { - this.process.destroyForcibly(); - this.processOutput.close(); + lock.lock(); + try { + if (this.sstDumpToolTask != null) { + if (!this.sstDumpToolTask.getFuture().isDone()) { + this.sstDumpToolTask.getFuture().cancel(true); + } + this.processOutput.close(); + } + open.compareAndSet(true, false); + } finally { + lock.unlock(); } } + @Override + protected void finalize() throws Throwable { + this.close(); + } + /** * Class containing Parsed KeyValue Record from Sst Dumptool output. */ @@ -200,4 +239,20 @@ public String toString() { '}'; } } + + public static void main(String[] args) throws NativeLibraryNotLoadedException, IOException { + ManagedSSTDumpTool sstDumpTool = + new ManagedSSTDumpTool(new ForkJoinPool(), 50); + try (ManagedOptions options = new ManagedOptions(); + ManagedSSTDumpIterator iterator = new ManagedSSTDumpIterator(sstDumpTool, + "/Users/sbalachandran/Documents/code/dummyrocks/rocks/000013.sst", options, 2000); + ) { + while (iterator.hasNext()) { + System.out.println(iterator.next()); + } + } catch (Exception e) { + throw new RuntimeException(e); + } + + } } diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpTool.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpTool.java index 771c64c720ee..fe2b63b36769 100644 --- a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpTool.java +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpTool.java @@ -21,13 +21,22 @@ import org.apache.hadoop.hdds.utils.NativeLibraryLoader; import org.apache.hadoop.hdds.utils.NativeLibraryNotLoadedException; +import java.io.BufferedReader; import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.util.Arrays; import java.util.Map; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.ForkJoinPool; +import java.util.concurrent.Future; import static org.apache.hadoop.hdds.utils.NativeConstants.ROCKS_TOOLS_NATIVE_LIBRARY_NAME; /** - * JNI for RocksDB SSTDumpTool. + * JNI for RocksDB SSTDumpTool. 
Pipes the output to an output stream */ public class ManagedSSTDumpTool { @@ -35,30 +44,84 @@ public class ManagedSSTDumpTool { NativeLibraryLoader.getInstance() .loadLibrary(ROCKS_TOOLS_NATIVE_LIBRARY_NAME); } + private int bufferCapacity; + private ExecutorService executorService; - public ManagedSSTDumpTool() throws NativeLibraryNotLoadedException { + public ManagedSSTDumpTool(ExecutorService executorService, + int bufferCapacity) + throws NativeLibraryNotLoadedException { if (!NativeLibraryLoader.getInstance() .isLibraryLoaded(ROCKS_TOOLS_NATIVE_LIBRARY_NAME)) { throw new NativeLibraryNotLoadedException( ROCKS_TOOLS_NATIVE_LIBRARY_NAME); } - + this.bufferCapacity = bufferCapacity; + this.executorService = executorService; } - public void run(String[] args) { - this.runInternal(args); + public SSTDumpToolTask run(String[] args, ManagedOptions options) + throws NativeLibraryNotLoadedException { + PipeInputStream pipeInputStream = new PipeInputStream(bufferCapacity); + return new SSTDumpToolTask(this.executorService.submit(() -> + this.runInternal(args, options.getNativeHandle(), + pipeInputStream.getNativeHandle())), pipeInputStream); } - public void run(Map args) { - this.run(args.entrySet().stream().map(e -> "--" + public SSTDumpToolTask run(Map args, ManagedOptions options) + throws NativeLibraryNotLoadedException { + return this.run(args.entrySet().stream().map(e -> "--" + (e.getValue() == null || e.getValue().isEmpty() ? e.getKey() : - e.getKey() + "=" + e.getValue())).toArray(String[]::new)); + e.getKey() + "=" + e.getValue())).toArray(String[]::new), options); } - private native void runInternal(String[] args); + private native int runInternal(String[] args, long optionsHandle, + long pipeHandle); public static void main(String[] args) - throws NativeLibraryNotLoadedException, FileNotFoundException { - new ManagedSSTDumpTool().run(args); + throws NativeLibraryNotLoadedException, IOException { + SSTDumpToolTask task = new ManagedSSTDumpTool(new ForkJoinPool(), 50) + .run(new String[]{"--file=/Users/sbalachandran/Documents/code/dummyrocks/rocks/000013.sst", + "--command=scan"}, + new ManagedOptions()); + BufferedReader b = new BufferedReader(new InputStreamReader( + task.getPipedOutput())); + + char[] a = new char[10]; + int numberOfCharsRead = 0; + System.out.println("Starting Loop"); + do { + System.out.print(String.valueOf(a, 0, numberOfCharsRead)); + numberOfCharsRead = b.read(a); + }while(numberOfCharsRead >=0); + System.out.println("Loop"); + } + + static class SSTDumpToolTask { + private Future future; + private PipeInputStream pipedOutput; + + SSTDumpToolTask(Future future, PipeInputStream pipedOutput) { + this.future = future; + this.pipedOutput = pipedOutput; + } + + public Future getFuture() { + return future; + } + + public PipeInputStream getPipedOutput() { + return pipedOutput; + } + + public int exitValue() { + if (this.future.isDone()) { + try { + return future.get(); + } catch (InterruptedException | ExecutionException e) { + return 1; + } + } + return 0; + } } } diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/PipeInputStream.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/PipeInputStream.java new file mode 100644 index 000000000000..c8150bffaf8f --- /dev/null +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/PipeInputStream.java @@ -0,0 +1,101 @@ +package org.apache.hadoop.hdds.utils.db.managed; + +import 
org.apache.hadoop.hdds.utils.NativeLibraryLoader; +import org.apache.hadoop.hdds.utils.NativeLibraryNotLoadedException; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.util.Arrays; +import java.util.concurrent.atomic.AtomicBoolean; + +import static org.apache.hadoop.hdds.utils.NativeConstants.ROCKS_TOOLS_NATIVE_LIBRARY_NAME; + +public class PipeInputStream extends InputStream { + + static { + NativeLibraryLoader.getInstance() + .loadLibrary(ROCKS_TOOLS_NATIVE_LIBRARY_NAME); + } + byte[] byteBuffer; + private long nativeHandle; + private int numberOfBytesLeftToRead; + private int index = 0; + private int capacity; + + private AtomicBoolean cleanup; + + PipeInputStream(int capacity) throws NativeLibraryNotLoadedException { + if (!NativeLibraryLoader.getInstance() + .isLibraryLoaded(ROCKS_TOOLS_NATIVE_LIBRARY_NAME)) { + throw new NativeLibraryNotLoadedException( + ROCKS_TOOLS_NATIVE_LIBRARY_NAME); + } + + this.byteBuffer = new byte[capacity]; + this.numberOfBytesLeftToRead = 0; + this.capacity = capacity; + this.nativeHandle = newPipe(); + this.cleanup = new AtomicBoolean(false); + } + + long getNativeHandle() { + return nativeHandle; + } + + @Override + public int read() { + if (numberOfBytesLeftToRead < 0) { + this.close(); + return -1; + } + if (numberOfBytesLeftToRead == 0) { + numberOfBytesLeftToRead = readInternal(byteBuffer, capacity, + nativeHandle); + index = 0; + return read(); + } + numberOfBytesLeftToRead--; + int ret = byteBuffer[index] & 0xFF; + index+=1; + return ret; + } + + private native long newPipe(); + + private native int readInternal(byte[] byteBuffer, int capacity, + long nativeHandle); + private native void closeInternal(long nativeHandle); + + @Override + public void close() { + if (this.cleanup.compareAndSet(false, true)) { + closeInternal(this.nativeHandle); + } + } + + @Override + protected void finalize() throws Throwable { + close(); + super.finalize(); + } + + public static void main(String[] args) throws IOException, + NativeLibraryNotLoadedException { + PipeInputStream p = new PipeInputStream(50); + System.out.println(Arrays.toString(p.byteBuffer)); + BufferedReader bufferedReader =new BufferedReader(new InputStreamReader(p)); + char[] a = new char[5]; + int numberOfCharsRead = 0; + StringBuilder stringBuilder =new StringBuilder(); + do { + stringBuilder.append(a, 0, numberOfCharsRead); + numberOfCharsRead = bufferedReader.read(a); + }while(numberOfCharsRead >=0); + p.readInternal(p.byteBuffer, p.capacity,p.nativeHandle); + System.out.println(stringBuilder.toString()); + System.out.println(Arrays.toString(p.byteBuffer)); + + } +} diff --git a/hadoop-hdds/rocks-native/src/main/native/Pipe.cpp b/hadoop-hdds/rocks-native/src/main/native/Pipe.cpp new file mode 100644 index 000000000000..310c67f7e7b3 --- /dev/null +++ b/hadoop-hdds/rocks-native/src/main/native/Pipe.cpp @@ -0,0 +1,20 @@ +// +// Created by Swaminathan Balachandran on 3/2/23. +// + +#include "Pipe.h" +#include + +Pipe::Pipe() { + pipe(p); + open = true; +} + +Pipe::~Pipe() { + ::close(p[0]); + ::close(p[1]); +} + +void Pipe::close() { + open = false; +} diff --git a/hadoop-hdds/rocks-native/src/main/native/Pipe.h b/hadoop-hdds/rocks-native/src/main/native/Pipe.h new file mode 100644 index 000000000000..e7de632033c9 --- /dev/null +++ b/hadoop-hdds/rocks-native/src/main/native/Pipe.h @@ -0,0 +1,37 @@ +// +// Created by Swaminathan Balachandran on 3/2/23. 
+// + +#ifndef UNTITLED_PIPE_H +#define UNTITLED_PIPE_H + + +#include + +class Pipe { + public: + Pipe(); + ~Pipe(); + void close(); + int getReadFd() { + return p[0]; + } + + int getWriteFd() { + return p[1]; + } + + bool isOpen() { + return open; + } + + + private: + int p[2]; + FILE* wr; + bool open; + +}; + + +#endif //UNTITLED_PIPE_H diff --git a/hadoop-hdds/rocks-native/src/main/native/PipeInputStream.cpp b/hadoop-hdds/rocks-native/src/main/native/PipeInputStream.cpp new file mode 100644 index 000000000000..8ac9ae7334e2 --- /dev/null +++ b/hadoop-hdds/rocks-native/src/main/native/PipeInputStream.cpp @@ -0,0 +1,34 @@ +// +// Created by Swaminathan Balachandran on 3/2/23. +// +#include +#include +//#include +#include "Pipe.h" +#include "cplusplus_to_java_convert.h" +#include "org_apache_hadoop_hdds_utils_db_managed_PipeInputStream.h" + + +jlong Java_org_apache_hadoop_hdds_utils_db_managed_PipeInputStream_newPipe(JNIEnv *, jobject) { + Pipe* pipe = new Pipe(); + return GET_CPLUSPLUS_POINTER(pipe); +} + +jint Java_org_apache_hadoop_hdds_utils_db_managed_PipeInputStream_readInternal(JNIEnv *env, jobject object, jbyteArray jbyteArray, jint capacity, jlong nativeHandle) { + int cap_int = capacity; + Pipe* pipe = reinterpret_cast(nativeHandle); + jbyte* b = (env)->GetByteArrayElements(jbyteArray, JNI_FALSE); + cap_int = read(pipe->getReadFd(), b, cap_int); + if (cap_int == 0) { + if (!pipe->isOpen()) { + cap_int = -1; + } + } + (env)->ReleaseByteArrayElements(jbyteArray, b, 0); + return cap_int; +} + +void Java_org_apache_hadoop_hdds_utils_db_managed_PipeInputStream_closeInternal(JNIEnv *env, jobject object, jlong nativeHandle) { + delete reinterpret_cast(nativeHandle); +} + diff --git a/hadoop-hdds/rocks-native/src/main/native/SSTDumpTool.cpp b/hadoop-hdds/rocks-native/src/main/native/SSTDumpTool.cpp index a2634ee06f80..903e372b8e83 100644 --- a/hadoop-hdds/rocks-native/src/main/native/SSTDumpTool.cpp +++ b/hadoop-hdds/rocks-native/src/main/native/SSTDumpTool.cpp @@ -20,16 +20,30 @@ #include "rocksdb/options.h" #include "rocksdb/sst_dump_tool.h" #include "string" +#include "cplusplus_to_java_convert.h" +#include "Pipe.h" +#include "iostream" -void Java_org_apache_hadoop_hdds_utils_db_managed_ManagedSSTDumpTool_runInternal(JNIEnv *env, jobject obj, jobjectArray argsArray) { +jint Java_org_apache_hadoop_hdds_utils_db_managed_ManagedSSTDumpTool_runInternal(JNIEnv *env, jobject obj, jobjectArray argsArray, +jlong optionsHandle, jlong pipeHandle) { ROCKSDB_NAMESPACE::SSTDumpTool dumpTool; + ROCKSDB_NAMESPACE::Options* options = reinterpret_cast(optionsHandle); + Pipe* pipe = reinterpret_cast(pipeHandle); int length = env->GetArrayLength(argsArray); - const char* args[length + 1]; + char* args[length + 1]; args[0] = strdup("./sst_dump"); - for(int i = 0; i < env->GetArrayLength(argsArray); i++) { - - args[i+1] = (char*)env->GetStringUTFChars((jstring)env-> - GetObjectArrayElement(argsArray, (jsize)i), JNI_FALSE); + for(int i = 0; i < length; i++) { + jstring str_val = (jstring)env->GetObjectArrayElement(argsArray, (jsize)i); + char* utf_str = (char*)env->GetStringUTFChars(str_val, JNI_FALSE); + args[i+1] = strdup(utf_str); + env->ReleaseStringUTFChars(str_val, utf_str); + } + FILE* wr = fdopen(pipe->getWriteFd(), "w"); + int ret = dumpTool.Run(length + 1, args, *options, wr); + for (int i = 0; iclose(); + return ret; } \ No newline at end of file diff --git a/hadoop-hdds/rocks-native/src/main/native/cplusplus_to_java_convert.h b/hadoop-hdds/rocks-native/src/main/native/cplusplus_to_java_convert.h 
new file mode 100644 index 000000000000..0eea6fa2c495 --- /dev/null +++ b/hadoop-hdds/rocks-native/src/main/native/cplusplus_to_java_convert.h @@ -0,0 +1,37 @@ +// Copyright (c) 2011-present, Facebook, Inc. All rights reserved. +// This source code is licensed under both the GPLv2 (found in the +// COPYING file in the root directory) and Apache 2.0 License +// (found in the LICENSE.Apache file in the root directory). + +#pragma once + +/* + * This macro is used for 32 bit OS. In 32 bit OS, the result number is a + negative number if we use reinterpret_cast(pointer). + * For example, jlong ptr = reinterpret_cast(pointer), ptr is a negative + number in 32 bit OS. + * If we check ptr using ptr > 0, it fails. For example, the following code is + not correct. + * if (jblock_cache_handle > 0) { + std::shared_ptr *pCache = + reinterpret_cast *>( + jblock_cache_handle); + options.block_cache = *pCache; + } + * But the result number is positive number if we do + reinterpret_cast(pointer) first and then cast it to jlong. size_t is 4 + bytes long in 32 bit OS and 8 bytes long in 64 bit OS. + static_cast(reinterpret_cast(_pointer)) is also working in 64 + bit OS. + * + * We don't need an opposite cast because it works from jlong to c++ pointer in + both 32 bit and 64 bit OS. + * For example, the following code is working in both 32 bit and 64 bit OS. + jblock_cache_handle is jlong. + * std::shared_ptr *pCache = + reinterpret_cast *>( + jblock_cache_handle); +*/ + +#define GET_CPLUSPLUS_POINTER(_pointer) \ + static_cast(reinterpret_cast(_pointer)) From 4f7f75b73de468e7073c93c446901e52980a3b50 Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Fri, 3 Mar 2023 18:49:21 -0800 Subject: [PATCH 25/51] HDDS-8028: Fix Checkstyle Issues --- .../hadoop/hdds/utils/HddsServerUtil.java | 2 -- .../db/managed/ManagedSSTDumpIterator.java | 30 +++------------- .../utils/db/managed/ManagedSSTDumpTool.java | 27 -------------- .../utils/db/managed/PipeInputStream.java | 36 +++++-------------- 4 files changed, 13 insertions(+), 82 deletions(-) diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java index 63c954529228..68423e608574 100644 --- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java +++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hdds.utils; import com.google.common.base.Strings; -import com.google.common.collect.Lists; import com.google.protobuf.BlockingService; import org.apache.commons.compress.archivers.ArchiveEntry; import org.apache.commons.compress.archivers.ArchiveOutputStream; @@ -62,7 +61,6 @@ import java.nio.file.Path; import java.util.ArrayList; import java.util.Collection; -import java.util.List; import java.util.Optional; import java.util.OptionalInt; import java.util.concurrent.TimeUnit; diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java index dfa62e71b98b..acfa32ecb607 100644 --- a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java @@ -17,8 +17,6 @@ package org.apache.hadoop.hdds.utils.db.managed; 
-import com.google.common.collect.Lists; -import org.apache.hadoop.hdds.utils.HddsServerUtil; import org.apache.hadoop.hdds.utils.NativeLibraryNotLoadedException; import org.eclipse.jetty.io.RuntimeIOException; @@ -27,11 +25,7 @@ import java.io.IOException; import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; -import java.util.Collections; import java.util.Iterator; -import java.util.List; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.ForkJoinPool; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; @@ -87,7 +81,7 @@ private void init(ManagedSSTDumpTool sstDumpTool, File sstFile, ManagedOptions options) throws NativeLibraryNotLoadedException { String[] args = {"--file=" + sstFile.getAbsolutePath(), - "--command=scan"}; + "--command=scan"}; this.sstDumpToolTask = sstDumpTool.run(args, options); processOutput = new BufferedReader(new InputStreamReader( sstDumpToolTask.getPipedOutput(), StandardCharsets.UTF_8)); @@ -164,8 +158,10 @@ public KeyValue next() { currentMatcher.start())); } prevMatchEndIndex = currentMatcher.end(); - nextKey = new KeyValue(currentMatcher.group(1), currentMatcher.group(2), - currentMatcher.group(3)); + nextKey = new KeyValue( + currentMatcher.group(PATTERN_KEY_GROUP_NUMBER), + currentMatcher.group(PATTERN_SEQ_GROUP_NUMBER), + currentMatcher.group(PATTERN_TYPE_GROUP_NUMBER)); return currentKey; } finally { lock.unlock(); @@ -239,20 +235,4 @@ public String toString() { '}'; } } - - public static void main(String[] args) throws NativeLibraryNotLoadedException, IOException { - ManagedSSTDumpTool sstDumpTool = - new ManagedSSTDumpTool(new ForkJoinPool(), 50); - try (ManagedOptions options = new ManagedOptions(); - ManagedSSTDumpIterator iterator = new ManagedSSTDumpIterator(sstDumpTool, - "/Users/sbalachandran/Documents/code/dummyrocks/rocks/000013.sst", options, 2000); - ) { - while (iterator.hasNext()) { - System.out.println(iterator.next()); - } - } catch (Exception e) { - throw new RuntimeException(e); - } - - } } diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpTool.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpTool.java index fe2b63b36769..93d55d49b9a8 100644 --- a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpTool.java +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpTool.java @@ -21,16 +21,9 @@ import org.apache.hadoop.hdds.utils.NativeLibraryLoader; import org.apache.hadoop.hdds.utils.NativeLibraryNotLoadedException; -import java.io.BufferedReader; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.util.Arrays; import java.util.Map; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; -import java.util.concurrent.ForkJoinPool; import java.util.concurrent.Future; import static org.apache.hadoop.hdds.utils.NativeConstants.ROCKS_TOOLS_NATIVE_LIBRARY_NAME; @@ -76,26 +69,6 @@ public SSTDumpToolTask run(Map args, ManagedOptions options) private native int runInternal(String[] args, long optionsHandle, long pipeHandle); - - public static void main(String[] args) - throws NativeLibraryNotLoadedException, IOException { - SSTDumpToolTask task = new ManagedSSTDumpTool(new ForkJoinPool(), 50) - 
.run(new String[]{"--file=/Users/sbalachandran/Documents/code/dummyrocks/rocks/000013.sst", - "--command=scan"}, - new ManagedOptions()); - BufferedReader b = new BufferedReader(new InputStreamReader( - task.getPipedOutput())); - - char[] a = new char[10]; - int numberOfCharsRead = 0; - System.out.println("Starting Loop"); - do { - System.out.print(String.valueOf(a, 0, numberOfCharsRead)); - numberOfCharsRead = b.read(a); - }while(numberOfCharsRead >=0); - System.out.println("Loop"); - } - static class SSTDumpToolTask { private Future future; private PipeInputStream pipedOutput; diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/PipeInputStream.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/PipeInputStream.java index c8150bffaf8f..aecbec527dea 100644 --- a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/PipeInputStream.java +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/PipeInputStream.java @@ -2,23 +2,21 @@ import org.apache.hadoop.hdds.utils.NativeLibraryLoader; import org.apache.hadoop.hdds.utils.NativeLibraryNotLoadedException; - -import java.io.BufferedReader; -import java.io.IOException; import java.io.InputStream; -import java.io.InputStreamReader; -import java.util.Arrays; import java.util.concurrent.atomic.AtomicBoolean; import static org.apache.hadoop.hdds.utils.NativeConstants.ROCKS_TOOLS_NATIVE_LIBRARY_NAME; +/** + * JNI for reading data from pipe. + */ public class PipeInputStream extends InputStream { static { NativeLibraryLoader.getInstance() .loadLibrary(ROCKS_TOOLS_NATIVE_LIBRARY_NAME); } - byte[] byteBuffer; + private byte[] byteBuffer; private long nativeHandle; private int numberOfBytesLeftToRead; private int index = 0; @@ -58,15 +56,15 @@ public int read() { } numberOfBytesLeftToRead--; int ret = byteBuffer[index] & 0xFF; - index+=1; + index += 1; return ret; } private native long newPipe(); - private native int readInternal(byte[] byteBuffer, int capacity, - long nativeHandle); - private native void closeInternal(long nativeHandle); + private native int readInternal(byte[] buff, int numberOfBytes, + long pipeHandle); + private native void closeInternal(long pipeHandle); @Override public void close() { @@ -80,22 +78,4 @@ protected void finalize() throws Throwable { close(); super.finalize(); } - - public static void main(String[] args) throws IOException, - NativeLibraryNotLoadedException { - PipeInputStream p = new PipeInputStream(50); - System.out.println(Arrays.toString(p.byteBuffer)); - BufferedReader bufferedReader =new BufferedReader(new InputStreamReader(p)); - char[] a = new char[5]; - int numberOfCharsRead = 0; - StringBuilder stringBuilder =new StringBuilder(); - do { - stringBuilder.append(a, 0, numberOfCharsRead); - numberOfCharsRead = bufferedReader.read(a); - }while(numberOfCharsRead >=0); - p.readInternal(p.byteBuffer, p.capacity,p.nativeHandle); - System.out.println(stringBuilder.toString()); - System.out.println(Arrays.toString(p.byteBuffer)); - - } } From 35558b5b0224a97ba8404ea8383fc4020984fd71 Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Fri, 3 Mar 2023 21:55:16 -0800 Subject: [PATCH 26/51] HDDS-8028: Add Maven Profile --- .github/workflows/ci.yml | 2 +- hadoop-hdds/pom.xml | 5 + hadoop-hdds/rocks-native/pom.xml | 581 ++++++++++++++++--------------- hadoop-ozone/dist/pom.xml | 7 +- hadoop-ozone/pom.xml | 5 + 5 files changed, 316 insertions(+), 284 deletions(-) diff --git 
a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d894e7c85cad..f540c1df77a3 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -61,7 +61,7 @@ jobs: needs: - build-info runs-on: ubuntu-20.04 - timeout-minutes: 60 + timeout-minutes: 30 if: needs.build-info.outputs.needs-build == 'true' strategy: matrix: diff --git a/hadoop-hdds/pom.xml b/hadoop-hdds/pom.xml index 1aaa21f9f9f6..7a68f66f5449 100644 --- a/hadoop-hdds/pom.xml +++ b/hadoop-hdds/pom.xml @@ -242,6 +242,11 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd"> bcpkix-jdk15on ${bouncycastle.version} + + org.apache.ozone + hdds-rocks-native + ${hdds.version} + diff --git a/hadoop-hdds/rocks-native/pom.xml b/hadoop-hdds/rocks-native/pom.xml index 701de921b576..75fbfe42d0e7 100644 --- a/hadoop-hdds/rocks-native/pom.xml +++ b/hadoop-hdds/rocks-native/pom.xml @@ -27,301 +27,318 @@ org.apache.ozone - hdds-server-framework + hdds-common + + + org.eclipse.jetty + jetty-io 8 8 - 23 - true - - - - com.googlecode.maven-download-plugin - download-maven-plugin - - - rocksdb source download - generate-sources - - wget - - - https://github.com/facebook/rocksdb/archive/refs/tags/v${rocksdb.version}.tar.gz - rocksdb-v${rocksdb.version}.tar.gz - ${project.build.directory}/rocksdb - - - - zlib source download - generate-sources - - wget - - - https://zlib.net/zlib-${zlib.version}.tar.gz - zlib-${zlib.version}.tar.gz - ${project.build.directory}/zlib - - - - bzip2 source download - generate-sources - - wget - - - https://sourceware.org/pub/bzip2/bzip2-${bzip2.version}.tar.gz - bzip2-v${bzip2.version}.tar.gz - ${project.build.directory}/bzip2 - - - - lz4 source download - generate-sources - - wget - - - https://github.com/lz4/lz4/archive/refs/tags/v${lz4.version}.tar.gz - lz4-v${lz4.version}.tar.gz - ${project.build.directory}/lz4 - - - - snappy source download - generate-sources - - wget - - - https://github.com/google/snappy/archive/refs/tags/${snappy.version}.tar.gz - snappy-v${snappy.version}.tar.gz - ${project.build.directory}/snappy - - - - zstd source download - generate-sources - - wget - - - https://github.com/facebook/zstd/archive/refs/tags/v${zstd.version}.tar.gz - zstd-v${zstd.version}.tar.gz - ${project.build.directory}/zstd - - - - - - org.apache.maven.plugins - maven-patch-plugin - 1.1.1 - - ${basedir}/src/main/patches/rocks-native.patch - 1 - ${project.build.directory}/rocksdb/rocksdb-${rocksdb.version} - - - - patch - process-sources - - apply - - - - - - maven-antrun-plugin - - - unzip-artifact - generate-sources - - - - - - - - - - - - run - - - - build-zlib - process-sources - - - - - - - - - - - - run - - - - build-bzip2 - process-sources - - - - - - - - - run - - - - build-lz4 - process-sources - - - - - - - - - run - - - - build-zstd - process-sources - - - - - - - - - run - - - - build-snappy - process-sources - - - - - - - - - - - - - run - - - - build-rocksjava - process-resources - - - - - - - - - - - - - - - - - run - - - - build-rocks-tools - process-classes + + + rocks_tools_native + + + rocks_tools_native + + + + 23 + true + + + + + com.googlecode.maven-download-plugin + download-maven-plugin + + + rocksdb source download + generate-sources + + wget + + + https://github.com/facebook/rocksdb/archive/refs/tags/v${rocksdb.version}.tar.gz + rocksdb-v${rocksdb.version}.tar.gz + ${project.build.directory}/rocksdb + + + + zlib source download + generate-sources + + wget + + + https://zlib.net/zlib-${zlib.version}.tar.gz + zlib-${zlib.version}.tar.gz + ${project.build.directory}/zlib + + + + bzip2 source 
download + generate-sources + + wget + + + https://sourceware.org/pub/bzip2/bzip2-${bzip2.version}.tar.gz + bzip2-v${bzip2.version}.tar.gz + ${project.build.directory}/bzip2 + + + + lz4 source download + generate-sources + + wget + + + https://github.com/lz4/lz4/archive/refs/tags/v${lz4.version}.tar.gz + lz4-v${lz4.version}.tar.gz + ${project.build.directory}/lz4 + + + + snappy source download + generate-sources + + wget + + + https://github.com/google/snappy/archive/refs/tags/${snappy.version}.tar.gz + snappy-v${snappy.version}.tar.gz + ${project.build.directory}/snappy + + + + zstd source download + generate-sources + + wget + + + https://github.com/facebook/zstd/archive/refs/tags/v${zstd.version}.tar.gz + zstd-v${zstd.version}.tar.gz + ${project.build.directory}/zstd + + + + + + + org.apache.maven.plugins + maven-patch-plugin + 1.1.1 - - - - - - - - - - - - - - - - - - - - + ${basedir}/src/main/patches/rocks-native.patch + 1 + ${project.build.directory}/rocksdb/rocksdb-${rocksdb.version} - - run - - - - copy-lib-file - prepare-package + + + patch + process-sources + + apply + + + + + + maven-antrun-plugin + + + unzip-artifact + generate-sources + + + + + + + + + + + + run + + + + build-zlib + process-sources + + + + + + + + + + + + run + + + + build-bzip2 + process-sources + + + + + + + + + run + + + + build-lz4 + process-sources + + + + + + + + + run + + + + build-zstd + process-sources + + + + + + + + + run + + + + build-snappy + process-sources + + + + + + + + + + + + + run + + + + build-rocksjava + process-resources + + + + + + + + + + + + + + + + + run + + + + build-rocks-tools + process-classes + + + + + + + + + + + + + + + + + + + + + + + + run + + + + copy-lib-file + prepare-package + + + + + + + + + run + + + + + + org.apache.maven.plugins + maven-jar-plugin + ${maven-jar-plugin.version} - - - - - + + **/*.class + **/lib*.dylib + **/lib*.so + **/lib*.jnilib + **/lib*.dll + - - run - - - - - - org.apache.maven.plugins - maven-jar-plugin - ${maven-jar-plugin.version} - - - **/*.class - **/lib*.dylib - **/lib*.so - **/lib*.jnilib - **/lib*.dll - - - - - - - + + + + java-8 1.8 + + rocks_tools_native + @@ -351,9 +368,9 @@ java-11 + 11 - javac.version - 11 + rocks_tools_native diff --git a/hadoop-ozone/dist/pom.xml b/hadoop-ozone/dist/pom.xml index 659c39c3d1c5..81a608cd2d6c 100644 --- a/hadoop-ozone/dist/pom.xml +++ b/hadoop-ozone/dist/pom.xml @@ -75,7 +75,8 @@ *.classpath hdds-server-scm,ozone-common,ozone-csi,ozone-datanode,ozone-insight, - ozone-manager,ozone-recon,ozone-s3gateway,ozone-tools + ozone-manager,ozone-recon,ozone-s3gateway,ozone-tools,hdds-rocks-native, + hdds-rocks-native @@ -223,6 +224,10 @@ org.apache.ozone ozone-insight + + org.apache.ozone + hdds-rocks-native + diff --git a/hadoop-ozone/pom.xml b/hadoop-ozone/pom.xml index dcac19b46db2..4daa2edf7687 100644 --- a/hadoop-ozone/pom.xml +++ b/hadoop-ozone/pom.xml @@ -262,6 +262,11 @@ bcprov-jdk15on ${bouncycastle.version} + + org.apache.ozone + hdds-rocks-native + ${hdds.version} + From 4038205598d283e04fc88a7a79c41bd65ba2ecd3 Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Fri, 3 Mar 2023 21:56:43 -0800 Subject: [PATCH 27/51] HDDS-8028: Add Rocksdb Patch file --- .../src/main/patches/rocks-native.patch | 504 ++++++++++++++++++ 1 file changed, 504 insertions(+) create mode 100644 hadoop-hdds/rocks-native/src/main/patches/rocks-native.patch diff --git a/hadoop-hdds/rocks-native/src/main/patches/rocks-native.patch b/hadoop-hdds/rocks-native/src/main/patches/rocks-native.patch new file mode 100644 index 
000000000000..3b35136587ac --- /dev/null +++ b/hadoop-hdds/rocks-native/src/main/patches/rocks-native.patch @@ -0,0 +1,504 @@ +diff --git a/include/rocksdb/sst_dump_tool.h b/include/rocksdb/sst_dump_tool.h +index 9261ba47d..09ed123e5 100644 +--- a/include/rocksdb/sst_dump_tool.h ++++ b/include/rocksdb/sst_dump_tool.h +@@ -11,7 +11,8 @@ namespace ROCKSDB_NAMESPACE { + + class SSTDumpTool { + public: +- int Run(int argc, char const* const* argv, Options options = Options()); ++ int Run(int argc, char const* const* argv,Options options = Options(), ++ FILE* out = stdout, FILE* err = stderr); + }; + + } // namespace ROCKSDB_NAMESPACE +diff --git a/table/sst_file_dumper.cc b/table/sst_file_dumper.cc +index eefbaaeee..2c8106298 100644 +--- a/table/sst_file_dumper.cc ++++ b/table/sst_file_dumper.cc +@@ -45,7 +45,7 @@ SstFileDumper::SstFileDumper(const Options& options, + Temperature file_temp, size_t readahead_size, + bool verify_checksum, bool output_hex, + bool decode_blob_index, const EnvOptions& soptions, +- bool silent) ++ bool silent, FILE* out, FILE* err) + : file_name_(file_path), + read_num_(0), + file_temp_(file_temp), +@@ -57,10 +57,13 @@ SstFileDumper::SstFileDumper(const Options& options, + ioptions_(options_), + moptions_(ColumnFamilyOptions(options_)), + read_options_(verify_checksum, false), +- internal_comparator_(BytewiseComparator()) { ++ internal_comparator_(BytewiseComparator()), ++ out(out), ++ err(err) ++ { + read_options_.readahead_size = readahead_size; + if (!silent_) { +- fprintf(stdout, "Process %s\n", file_path.c_str()); ++ fprintf(out, "Process %s\n", file_path.c_str()); + } + init_result_ = GetTableReader(file_name_); + } +@@ -253,17 +256,17 @@ Status SstFileDumper::ShowAllCompressionSizes( + int32_t compress_level_from, int32_t compress_level_to, + uint32_t max_dict_bytes, uint32_t zstd_max_train_bytes, + uint64_t max_dict_buffer_bytes, bool use_zstd_dict_trainer) { +- fprintf(stdout, "Block Size: %" ROCKSDB_PRIszt "\n", block_size); ++ fprintf(out, "Block Size: %" ROCKSDB_PRIszt "\n", block_size); + for (auto& i : compression_types) { + if (CompressionTypeSupported(i.first)) { +- fprintf(stdout, "Compression: %-24s\n", i.second); ++ fprintf(out, "Compression: %-24s\n", i.second); + CompressionOptions compress_opt; + compress_opt.max_dict_bytes = max_dict_bytes; + compress_opt.zstd_max_train_bytes = zstd_max_train_bytes; + compress_opt.max_dict_buffer_bytes = max_dict_buffer_bytes; + compress_opt.use_zstd_dict_trainer = use_zstd_dict_trainer; + for (int32_t j = compress_level_from; j <= compress_level_to; j++) { +- fprintf(stdout, "Compression level: %d", j); ++ fprintf(out, "Compression level: %d", j); + compress_opt.level = j; + Status s = ShowCompressionSize(block_size, i.first, compress_opt); + if (!s.ok()) { +@@ -271,7 +274,7 @@ Status SstFileDumper::ShowAllCompressionSizes( + } + } + } else { +- fprintf(stdout, "Unsupported compression type: %s.\n", i.second); ++ fprintf(out, "Unsupported compression type: %s.\n", i.second); + } + } + return Status::OK(); +@@ -307,9 +310,9 @@ Status SstFileDumper::ShowCompressionSize( + } + + std::chrono::steady_clock::time_point end = std::chrono::steady_clock::now(); +- fprintf(stdout, " Size: %10" PRIu64, file_size); +- fprintf(stdout, " Blocks: %6" PRIu64, num_data_blocks); +- fprintf(stdout, " Time Taken: %10s microsecs", ++ fprintf(out, " Size: %10" PRIu64, file_size); ++ fprintf(out, " Blocks: %6" PRIu64, num_data_blocks); ++ fprintf(out, " Time Taken: %10s microsecs", + std::to_string( + std::chrono::duration_cast(end 
- start) + .count()) +@@ -342,11 +345,11 @@ Status SstFileDumper::ShowCompressionSize( + : ((static_cast(not_compressed_blocks) / + static_cast(num_data_blocks)) * + 100.0); +- fprintf(stdout, " Compressed: %6" PRIu64 " (%5.1f%%)", compressed_blocks, ++ fprintf(out, " Compressed: %6" PRIu64 " (%5.1f%%)", compressed_blocks, + compressed_pcnt); +- fprintf(stdout, " Not compressed (ratio): %6" PRIu64 " (%5.1f%%)", ++ fprintf(out, " Not compressed (ratio): %6" PRIu64 " (%5.1f%%)", + ratio_not_compressed_blocks, ratio_not_compressed_pcnt); +- fprintf(stdout, " Not compressed (abort): %6" PRIu64 " (%5.1f%%)\n", ++ fprintf(out, " Not compressed (abort): %6" PRIu64 " (%5.1f%%)\n", + not_compressed_blocks, not_compressed_pcnt); + return Status::OK(); + } +@@ -362,7 +365,7 @@ Status SstFileDumper::ReadTableProperties(uint64_t table_magic_number, + /* memory_allocator= */ nullptr, prefetch_buffer); + if (!s.ok()) { + if (!silent_) { +- fprintf(stdout, "Not able to read table properties\n"); ++ fprintf(out, "Not able to read table properties\n"); + } + } + return s; +@@ -382,7 +385,7 @@ Status SstFileDumper::SetTableOptionsByMagicNumber( + + options_.table_factory.reset(bbtf); + if (!silent_) { +- fprintf(stdout, "Sst file format: block-based\n"); ++ fprintf(out, "Sst file format: block-based\n"); + } + + auto& props = table_properties_->user_collected_properties; +@@ -410,7 +413,7 @@ Status SstFileDumper::SetTableOptionsByMagicNumber( + + options_.table_factory.reset(NewPlainTableFactory(plain_table_options)); + if (!silent_) { +- fprintf(stdout, "Sst file format: plain table\n"); ++ fprintf(out, "Sst file format: plain table\n"); + } + } else { + char error_msg_buffer[80]; +@@ -427,7 +430,7 @@ Status SstFileDumper::SetOldTableOptions() { + assert(table_properties_ == nullptr); + options_.table_factory = std::make_shared(); + if (!silent_) { +- fprintf(stdout, "Sst file format: block-based(old version)\n"); ++ fprintf(out, "Sst file format: block-based(old version)\n"); + } + + return Status::OK(); +@@ -478,7 +481,7 @@ Status SstFileDumper::ReadSequential(bool print_kv, uint64_t read_num, + + if (print_kv) { + if (!decode_blob_index_ || ikey.type != kTypeBlobIndex) { +- fprintf(stdout, "%s => %s\n", ++ fprintf(out, "%s => %s\n", + ikey.DebugString(true, output_hex_).c_str(), + value.ToString(output_hex_).c_str()); + } else { +@@ -486,12 +489,12 @@ Status SstFileDumper::ReadSequential(bool print_kv, uint64_t read_num, + + const Status s = blob_index.DecodeFrom(value); + if (!s.ok()) { +- fprintf(stderr, "%s => error decoding blob index\n", ++ fprintf(err, "%s => error decoding blob index\n", + ikey.DebugString(true, output_hex_).c_str()); + continue; + } + +- fprintf(stdout, "%s => %s\n", ++ fprintf(out, "%s => %s\n", + ikey.DebugString(true, output_hex_).c_str(), + blob_index.DebugString(output_hex_).c_str()); + } +diff --git a/table/sst_file_dumper.h b/table/sst_file_dumper.h +index 7be876390..20e35ac2a 100644 +--- a/table/sst_file_dumper.h ++++ b/table/sst_file_dumper.h +@@ -22,7 +22,9 @@ class SstFileDumper { + bool verify_checksum, bool output_hex, + bool decode_blob_index, + const EnvOptions& soptions = EnvOptions(), +- bool silent = false); ++ bool silent = false, ++ FILE* out = stdout, ++ FILE* err = stderr); + + Status ReadSequential(bool print_kv, uint64_t read_num, bool has_from, + const std::string& from_key, bool has_to, +@@ -94,6 +96,8 @@ class SstFileDumper { + ReadOptions read_options_; + InternalKeyComparator internal_comparator_; + std::unique_ptr table_properties_; ++ FILE* out; ++ 
FILE* err; + }; + + } // namespace ROCKSDB_NAMESPACE +diff --git a/tools/sst_dump_tool.cc b/tools/sst_dump_tool.cc +index 7053366e7..b063f8e24 100644 +--- a/tools/sst_dump_tool.cc ++++ b/tools/sst_dump_tool.cc +@@ -31,7 +31,7 @@ static const std::vector> + + namespace { + +-void print_help(bool to_stderr) { ++void print_help(bool to_stderr, FILE* out, FILE* err) { + std::string supported_compressions; + for (CompressionType ct : GetSupportedCompressions()) { + if (!supported_compressions.empty()) { +@@ -43,7 +43,7 @@ void print_help(bool to_stderr) { + supported_compressions += str; + } + fprintf( +- to_stderr ? stderr : stdout, ++ to_stderr ? err : out, + R"(sst_dump --file= [--command=check|scan|raw|recompress|identify] + --file= + Path to SST file or directory containing SST files +@@ -149,7 +149,8 @@ bool ParseIntArg(const char* arg, const std::string arg_name, + } + } // namespace + +-int SSTDumpTool::Run(int argc, char const* const* argv, Options options) { ++int SSTDumpTool::Run(int argc, char const* const* argv, Options options, ++ FILE* out, FILE* err) { + std::string env_uri, fs_uri; + const char* dir_or_file = nullptr; + uint64_t read_num = std::numeric_limits::max(); +@@ -248,7 +249,7 @@ int SSTDumpTool::Run(int argc, char const* const* argv, Options options) { + return curr.second == compression_type; + }); + if (iter == kCompressions.end()) { +- fprintf(stderr, "%s is not a valid CompressionType\n", ++ fprintf(err, "%s is not a valid CompressionType\n", + compression_type.c_str()); + exit(1); + } +@@ -273,7 +274,7 @@ int SSTDumpTool::Run(int argc, char const* const* argv, Options options) { + std::cerr << pik_status.getState() << "\n"; + retc = -1; + } +- fprintf(stdout, "key=%s\n", ikey.DebugString(true, true).c_str()); ++ fprintf(out, "key=%s\n", ikey.DebugString(true, true).c_str()); + return retc; + } else if (ParseIntArg(argv[i], "--compression_level_from=", + "compression_level_from must be numeric", +@@ -288,9 +289,9 @@ int SSTDumpTool::Run(int argc, char const* const* argv, Options options) { + "compression_max_dict_bytes must be numeric", + &tmp_val)) { + if (tmp_val < 0 || tmp_val > std::numeric_limits::max()) { +- fprintf(stderr, "compression_max_dict_bytes must be a uint32_t: '%s'\n", ++ fprintf(err, "compression_max_dict_bytes must be a uint32_t: '%s'\n", + argv[i]); +- print_help(/*to_stderr*/ true); ++ print_help(/*to_stderr*/ true, out, err); + return 1; + } + compression_max_dict_bytes = static_cast(tmp_val); +@@ -298,10 +299,10 @@ int SSTDumpTool::Run(int argc, char const* const* argv, Options options) { + "compression_zstd_max_train_bytes must be numeric", + &tmp_val)) { + if (tmp_val < 0 || tmp_val > std::numeric_limits::max()) { +- fprintf(stderr, ++ fprintf(err, + "compression_zstd_max_train_bytes must be a uint32_t: '%s'\n", + argv[i]); +- print_help(/*to_stderr*/ true); ++ print_help(/*to_stderr*/ true, out, err); + return 1; + } + compression_zstd_max_train_bytes = static_cast(tmp_val); +@@ -309,41 +310,41 @@ int SSTDumpTool::Run(int argc, char const* const* argv, Options options) { + "compression_max_dict_buffer_bytes must be numeric", + &tmp_val)) { + if (tmp_val < 0) { +- fprintf(stderr, ++ fprintf(err, + "compression_max_dict_buffer_bytes must be positive: '%s'\n", + argv[i]); +- print_help(/*to_stderr*/ true); ++ print_help(/*to_stderr*/ true, out, err); + return 1; + } + compression_max_dict_buffer_bytes = static_cast(tmp_val); + } else if (strcmp(argv[i], "--compression_use_zstd_finalize_dict") == 0) { + compression_use_zstd_finalize_dict = 
true; + } else if (strcmp(argv[i], "--help") == 0) { +- print_help(/*to_stderr*/ false); ++ print_help(/*to_stderr*/ false, out, err); + return 0; + } else if (strcmp(argv[i], "--version") == 0) { + printf("%s\n", GetRocksBuildInfoAsString("sst_dump").c_str()); + return 0; + } else { +- fprintf(stderr, "Unrecognized argument '%s'\n\n", argv[i]); +- print_help(/*to_stderr*/ true); ++ fprintf(err, "Unrecognized argument '%s'\n\n", argv[i]); ++ print_help(/*to_stderr*/ true, out, err); + return 1; + } + } + + if(has_compression_level_from && has_compression_level_to) { + if(!has_specified_compression_types || compression_types.size() != 1) { +- fprintf(stderr, "Specify one compression type.\n\n"); ++ fprintf(err, "Specify one compression type.\n\n"); + exit(1); + } + } else if(has_compression_level_from || has_compression_level_to) { +- fprintf(stderr, "Specify both --compression_level_from and " ++ fprintf(err, "Specify both --compression_level_from and " + "--compression_level_to.\n\n"); + exit(1); + } + + if (use_from_as_prefix && has_from) { +- fprintf(stderr, "Cannot specify --prefix and --from\n\n"); ++ fprintf(err, "Cannot specify --prefix and --from\n\n"); + exit(1); + } + +@@ -357,8 +358,8 @@ int SSTDumpTool::Run(int argc, char const* const* argv, Options options) { + } + + if (dir_or_file == nullptr) { +- fprintf(stderr, "file or directory must be specified.\n\n"); +- print_help(/*to_stderr*/ true); ++ fprintf(err, "file or directory must be specified.\n\n"); ++ print_help(/*to_stderr*/ true, out, err); + exit(1); + } + +@@ -373,10 +374,10 @@ int SSTDumpTool::Run(int argc, char const* const* argv, Options options) { + Status s = Env::CreateFromUri(config_options, env_uri, fs_uri, &options.env, + &env_guard); + if (!s.ok()) { +- fprintf(stderr, "CreateEnvFromUri: %s\n", s.ToString().c_str()); ++ fprintf(err, "CreateEnvFromUri: %s\n", s.ToString().c_str()); + exit(1); + } else { +- fprintf(stdout, "options.env is %p\n", options.env); ++ fprintf(out, "options.env is %p\n", options.env); + } + } + +@@ -390,7 +391,7 @@ int SSTDumpTool::Run(int argc, char const* const* argv, Options options) { + Status s = env->FileExists(dir_or_file); + // dir_or_file does not exist + if (!s.ok()) { +- fprintf(stderr, "%s%s: No such file or directory\n", s.ToString().c_str(), ++ fprintf(err, "%s%s: No such file or directory\n", s.ToString().c_str(), + dir_or_file); + return 1; + } +@@ -418,13 +419,13 @@ int SSTDumpTool::Run(int argc, char const* const* argv, Options options) { + if (dir) { + filename = std::string(dir_or_file) + "/" + filename; + } +- + ROCKSDB_NAMESPACE::SstFileDumper dumper( + options, filename, Temperature::kUnknown, readahead_size, +- verify_checksum, output_hex, decode_blob_index); ++ verify_checksum, output_hex, decode_blob_index, EnvOptions(), ++ false,out, err); + // Not a valid SST + if (!dumper.getStatus().ok()) { +- fprintf(stderr, "%s: %s\n", filename.c_str(), ++ fprintf(err, "%s: %s\n", filename.c_str(), + dumper.getStatus().ToString().c_str()); + continue; + } else { +@@ -434,7 +435,7 @@ int SSTDumpTool::Run(int argc, char const* const* argv, Options options) { + if (valid_sst_files.size() == 1) { + // from_key and to_key are only used for "check", "scan", or "" + if (command == "check" || command == "scan" || command == "") { +- fprintf(stdout, "from [%s] to [%s]\n", ++ fprintf(out, "from [%s] to [%s]\n", + ROCKSDB_NAMESPACE::Slice(from_key).ToString(true).c_str(), + ROCKSDB_NAMESPACE::Slice(to_key).ToString(true).c_str()); + } +@@ -449,7 +450,7 @@ int SSTDumpTool::Run(int 
argc, char const* const* argv, Options options) { + compression_zstd_max_train_bytes, compression_max_dict_buffer_bytes, + !compression_use_zstd_finalize_dict); + if (!st.ok()) { +- fprintf(stderr, "Failed to recompress: %s\n", st.ToString().c_str()); ++ fprintf(err, "Failed to recompress: %s\n", st.ToString().c_str()); + exit(1); + } + return 0; +@@ -461,10 +462,10 @@ int SSTDumpTool::Run(int argc, char const* const* argv, Options options) { + + st = dumper.DumpTable(out_filename); + if (!st.ok()) { +- fprintf(stderr, "%s: %s\n", filename.c_str(), st.ToString().c_str()); ++ fprintf(err, "%s: %s\n", filename.c_str(), st.ToString().c_str()); + exit(1); + } else { +- fprintf(stdout, "raw dump written to file %s\n", &out_filename[0]); ++ fprintf(out, "raw dump written to file %s\n", &out_filename[0]); + } + continue; + } +@@ -476,7 +477,7 @@ int SSTDumpTool::Run(int argc, char const* const* argv, Options options) { + has_from || use_from_as_prefix, from_key, has_to, to_key, + use_from_as_prefix); + if (!st.ok()) { +- fprintf(stderr, "%s: %s\n", filename.c_str(), ++ fprintf(err, "%s: %s\n", filename.c_str(), + st.ToString().c_str()); + } + total_read += dumper.GetReadNumber(); +@@ -488,10 +489,10 @@ int SSTDumpTool::Run(int argc, char const* const* argv, Options options) { + if (command == "verify") { + st = dumper.VerifyChecksum(); + if (!st.ok()) { +- fprintf(stderr, "%s is corrupted: %s\n", filename.c_str(), ++ fprintf(err, "%s is corrupted: %s\n", filename.c_str(), + st.ToString().c_str()); + } else { +- fprintf(stdout, "The file is ok\n"); ++ fprintf(out, "The file is ok\n"); + } + continue; + } +@@ -503,15 +504,15 @@ int SSTDumpTool::Run(int argc, char const* const* argv, Options options) { + table_properties_from_reader; + st = dumper.ReadTableProperties(&table_properties_from_reader); + if (!st.ok()) { +- fprintf(stderr, "%s: %s\n", filename.c_str(), st.ToString().c_str()); +- fprintf(stderr, "Try to use initial table properties\n"); ++ fprintf(err, "%s: %s\n", filename.c_str(), st.ToString().c_str()); ++ fprintf(err, "Try to use initial table properties\n"); + table_properties = dumper.GetInitTableProperties(); + } else { + table_properties = table_properties_from_reader.get(); + } + if (table_properties != nullptr) { + if (show_properties) { +- fprintf(stdout, ++ fprintf(out, + "Table Properties:\n" + "------------------------------\n" + " %s", +@@ -523,30 +524,30 @@ int SSTDumpTool::Run(int argc, char const* const* argv, Options options) { + total_index_block_size += table_properties->index_size; + total_filter_block_size += table_properties->filter_size; + if (show_properties) { +- fprintf(stdout, ++ fprintf(out, + "Raw user collected properties\n" + "------------------------------\n"); + for (const auto& kv : table_properties->user_collected_properties) { + std::string prop_name = kv.first; + std::string prop_val = Slice(kv.second).ToString(true); +- fprintf(stdout, " # %s: 0x%s\n", prop_name.c_str(), ++ fprintf(out, " # %s: 0x%s\n", prop_name.c_str(), + prop_val.c_str()); + } + } + } else { +- fprintf(stderr, "Reader unexpectedly returned null properties\n"); ++ fprintf(err, "Reader unexpectedly returned null properties\n"); + } + } + } + if (show_summary) { +- fprintf(stdout, "total number of files: %" PRIu64 "\n", total_num_files); +- fprintf(stdout, "total number of data blocks: %" PRIu64 "\n", ++ fprintf(out, "total number of files: %" PRIu64 "\n", total_num_files); ++ fprintf(out, "total number of data blocks: %" PRIu64 "\n", + total_num_data_blocks); +- fprintf(stdout, 
"total data block size: %" PRIu64 "\n", ++ fprintf(out, "total data block size: %" PRIu64 "\n", + total_data_block_size); +- fprintf(stdout, "total index block size: %" PRIu64 "\n", ++ fprintf(out, "total index block size: %" PRIu64 "\n", + total_index_block_size); +- fprintf(stdout, "total filter block size: %" PRIu64 "\n", ++ fprintf(out, "total filter block size: %" PRIu64 "\n", + total_filter_block_size); + } + +@@ -554,24 +555,24 @@ int SSTDumpTool::Run(int argc, char const* const* argv, Options options) { + // No valid SST files are found + // Exit with an error state + if (dir) { +- fprintf(stdout, "------------------------------\n"); +- fprintf(stderr, "No valid SST files found in %s\n", dir_or_file); ++ fprintf(out, "------------------------------\n"); ++ fprintf(err, "No valid SST files found in %s\n", dir_or_file); + } else { +- fprintf(stderr, "%s is not a valid SST file\n", dir_or_file); ++ fprintf(err, "%s is not a valid SST file\n", dir_or_file); + } + return 1; + } else { + if (command == "identify") { + if (dir) { +- fprintf(stdout, "------------------------------\n"); +- fprintf(stdout, "List of valid SST files found in %s:\n", dir_or_file); ++ fprintf(out, "------------------------------\n"); ++ fprintf(out, "List of valid SST files found in %s:\n", dir_or_file); + for (const auto& f : valid_sst_files) { +- fprintf(stdout, "%s\n", f.c_str()); ++ fprintf(out, "%s\n", f.c_str()); + } +- fprintf(stdout, "Number of valid SST files: %zu\n", ++ fprintf(out, "Number of valid SST files: %zu\n", + valid_sst_files.size()); + } else { +- fprintf(stdout, "%s is a valid SST file\n", dir_or_file); ++ fprintf(out, "%s is a valid SST file\n", dir_or_file); + } + } + // At least one valid SST From 1d1111c861a8b080d7399628685c85e307c58dee Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Fri, 3 Mar 2023 22:00:57 -0800 Subject: [PATCH 28/51] HDDS-8028: Add docs --- .../utils/db/managed/ManagedSSTDumpTool.java | 4 ++++ .../utils/db/managed/PipeInputStream.java | 18 +++++++++++++++ .../rocks-native/src/main/native/Pipe.cpp | 20 ++++++++++++++--- .../rocks-native/src/main/native/Pipe.h | 20 ++++++++++++++--- .../src/main/native/PipeInputStream.cpp | 22 +++++++++++++++---- .../main/native/cplusplus_to_java_convert.h | 21 ++++++++++++++---- 6 files changed, 91 insertions(+), 14 deletions(-) diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpTool.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpTool.java index 93d55d49b9a8..bc9b7690ad8a 100644 --- a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpTool.java +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpTool.java @@ -69,6 +69,10 @@ public SSTDumpToolTask run(Map args, ManagedOptions options) private native int runInternal(String[] args, long optionsHandle, long pipeHandle); + + /** + * Class holding piped output of SST Dumptool & future of command. 
+ */ static class SSTDumpToolTask { private Future future; private PipeInputStream pipedOutput; diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/PipeInputStream.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/PipeInputStream.java index aecbec527dea..ca79d417a0b8 100644 --- a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/PipeInputStream.java +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/PipeInputStream.java @@ -1,3 +1,21 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + package org.apache.hadoop.hdds.utils.db.managed; import org.apache.hadoop.hdds.utils.NativeLibraryLoader; diff --git a/hadoop-hdds/rocks-native/src/main/native/Pipe.cpp b/hadoop-hdds/rocks-native/src/main/native/Pipe.cpp index 310c67f7e7b3..f311ed50c260 100644 --- a/hadoop-hdds/rocks-native/src/main/native/Pipe.cpp +++ b/hadoop-hdds/rocks-native/src/main/native/Pipe.cpp @@ -1,6 +1,20 @@ -// -// Created by Swaminathan Balachandran on 3/2/23. -// +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ #include "Pipe.h" #include diff --git a/hadoop-hdds/rocks-native/src/main/native/Pipe.h b/hadoop-hdds/rocks-native/src/main/native/Pipe.h index e7de632033c9..32277c5e20b8 100644 --- a/hadoop-hdds/rocks-native/src/main/native/Pipe.h +++ b/hadoop-hdds/rocks-native/src/main/native/Pipe.h @@ -1,6 +1,20 @@ -// -// Created by Swaminathan Balachandran on 3/2/23. -// +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ #ifndef UNTITLED_PIPE_H #define UNTITLED_PIPE_H diff --git a/hadoop-hdds/rocks-native/src/main/native/PipeInputStream.cpp b/hadoop-hdds/rocks-native/src/main/native/PipeInputStream.cpp index 8ac9ae7334e2..ce865cbd35b4 100644 --- a/hadoop-hdds/rocks-native/src/main/native/PipeInputStream.cpp +++ b/hadoop-hdds/rocks-native/src/main/native/PipeInputStream.cpp @@ -1,9 +1,23 @@ -// -// Created by Swaminathan Balachandran on 3/2/23. -// +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + #include #include -//#include #include "Pipe.h" #include "cplusplus_to_java_convert.h" #include "org_apache_hadoop_hdds_utils_db_managed_PipeInputStream.h" diff --git a/hadoop-hdds/rocks-native/src/main/native/cplusplus_to_java_convert.h b/hadoop-hdds/rocks-native/src/main/native/cplusplus_to_java_convert.h index 0eea6fa2c495..85f054e6212d 100644 --- a/hadoop-hdds/rocks-native/src/main/native/cplusplus_to_java_convert.h +++ b/hadoop-hdds/rocks-native/src/main/native/cplusplus_to_java_convert.h @@ -1,7 +1,20 @@ -// Copyright (c) 2011-present, Facebook, Inc. All rights reserved. -// This source code is licensed under both the GPLv2 (found in the -// COPYING file in the root directory) and Apache 2.0 License -// (found in the LICENSE.Apache file in the root directory). +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ #pragma once From 6effac62ecdad9a4920fc527e00090d546000685 Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Sat, 4 Mar 2023 00:22:55 -0800 Subject: [PATCH 29/51] HDDS-8028: Address review comments --- hadoop-hdds/rocks-native/pom.xml | 3 +-- hadoop-hdds/rocks-native/src/CMakeLists.txt | 4 +-- .../hdds/utils/NativeLibraryLoader.java | 16 ++++++++---- .../utils/db/managed/ManagedSSTDumpTool.java | 2 +- .../hdds/utils/db/managed/package-info.java | 2 +- .../hadoop/hdds/utils/package-info.java | 2 +- .../rocks-native/src/main/native/Pipe.cpp | 3 +++ .../rocks-native/src/main/native/Pipe.h | 18 ++++++++----- .../src/main/native/SSTDumpTool.cpp | 26 +++++++++---------- pom.xml | 11 ++++---- 10 files changed, 49 insertions(+), 38 deletions(-) diff --git a/hadoop-hdds/rocks-native/pom.xml b/hadoop-hdds/rocks-native/pom.xml index 75fbfe42d0e7..bbd9d1c52832 100644 --- a/hadoop-hdds/rocks-native/pom.xml +++ b/hadoop-hdds/rocks-native/pom.xml @@ -129,7 +129,6 @@ ${project.build.directory}/zstd - @@ -261,7 +260,7 @@ - + diff --git a/hadoop-hdds/rocks-native/src/CMakeLists.txt b/hadoop-hdds/rocks-native/src/CMakeLists.txt index de20ff2513e8..84463fc169e1 100644 --- a/hadoop-hdds/rocks-native/src/CMakeLists.txt +++ b/hadoop-hdds/rocks-native/src/CMakeLists.txt @@ -20,7 +20,7 @@ # CMake configuration. # -cmake_minimum_required(VERSION 3.1 FATAL_ERROR) +cmake_minimum_required(VERSION 2.8) set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fPIC") set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fPIC") project(ozone_native) @@ -75,4 +75,4 @@ if(${SST_DUMP_INCLUDE}) set(linked_libraries ${linked_libraries} bz2 zlib rocks_tools rocksdb lz4 snappy zstd) endif() add_library(ozone_rocksdb_tools SHARED ${SOURCE_FILES}) -target_link_libraries(ozone_rocksdb_tools ${linked_libraries}) \ No newline at end of file +target_link_libraries(ozone_rocksdb_tools ${linked_libraries}) diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/NativeLibraryLoader.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/NativeLibraryLoader.java index 322c54954ec9..565c5b93af32 100644 --- a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/NativeLibraryLoader.java +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/NativeLibraryLoader.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hdds.utils; - import java.io.File; import java.io.IOException; import java.io.InputStream; @@ -58,11 +57,15 @@ public static String getJniLibraryFileName(String libraryName) { } public static boolean isMac() { - return OS.contains("mac"); + return OS.startsWith("mac"); } public static boolean isWindows() { - return OS.contains("win"); + return OS.startsWith("win"); + } + + public static boolean isLinux() { + return OS.startsWith("linux"); } private static String getLibOsSuffix() { @@ -70,9 +73,12 @@ private static String getLibOsSuffix() { return ".dylib"; } else if (isWindows()) { return ".dll"; + } else if (isLinux()) { + return ".so"; } - return ".so"; + throw new UnsatisfiedLinkError(String.format("Unsupported OS %s", OS)); } + private static String appendLibOsSuffix(String libraryFileName) { return libraryFileName + getLibOsSuffix(); } @@ -108,6 +114,7 @@ public synchronized boolean loadLibrary(final String libraryName) { this.librariesLoaded.put(libraryName, loaded); return isLibraryLoaded(libraryName); } + private Optional copyResourceFromJarToTemp(final String libraryName) throws IOException { final String libraryFileName = getJniLibraryFileName(libraryName); @@ -128,7 
+135,6 @@ private Optional copyResourceFromJarToTemp(final String libraryName) Files.copy(is, temp.toPath(), StandardCopyOption.REPLACE_EXISTING); return Optional.ofNullable(temp); - } finally { if (is != null) { is.close(); diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpTool.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpTool.java index bc9b7690ad8a..45f246521e8f 100644 --- a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpTool.java +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpTool.java @@ -68,7 +68,7 @@ public SSTDumpToolTask run(Map args, ManagedOptions options) } private native int runInternal(String[] args, long optionsHandle, - long pipeHandle); + long pipeHandle); /** * Class holding piped output of SST Dumptool & future of command. diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/package-info.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/package-info.java index 310afc30ca80..2388b6ab083c 100644 --- a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/package-info.java +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/package-info.java @@ -21,4 +21,4 @@ * internal annotations and related code as needed, if needed. */ -package org.apache.hadoop.hdds.utils.db.managed; \ No newline at end of file +package org.apache.hadoop.hdds.utils.db.managed; diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/package-info.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/package-info.java index 2519c13b256a..4b605616617d 100644 --- a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/package-info.java +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/package-info.java @@ -21,4 +21,4 @@ * internal annotations and related code as needed, if needed. */ -package org.apache.hadoop.hdds.utils; \ No newline at end of file +package org.apache.hadoop.hdds.utils; diff --git a/hadoop-hdds/rocks-native/src/main/native/Pipe.cpp b/hadoop-hdds/rocks-native/src/main/native/Pipe.cpp index f311ed50c260..4a42d35af346 100644 --- a/hadoop-hdds/rocks-native/src/main/native/Pipe.cpp +++ b/hadoop-hdds/rocks-native/src/main/native/Pipe.cpp @@ -19,6 +19,9 @@ #include "Pipe.h" #include +const int Pipe::READ_FILE_DESCRIPTOR_IDX = 0; +const int Pipe::WRITE_FILE_DESCRIPTOR_IDX = 1; + Pipe::Pipe() { pipe(p); open = true; diff --git a/hadoop-hdds/rocks-native/src/main/native/Pipe.h b/hadoop-hdds/rocks-native/src/main/native/Pipe.h index 32277c5e20b8..aa75c6311cbc 100644 --- a/hadoop-hdds/rocks-native/src/main/native/Pipe.h +++ b/hadoop-hdds/rocks-native/src/main/native/Pipe.h @@ -16,23 +16,28 @@ * limitations under the License. 
*/ -#ifndef UNTITLED_PIPE_H -#define UNTITLED_PIPE_H - +#ifndef ROCKS_NATIVE_PIPE_H +#define ROCKS_NATIVE_PIPE_H #include class Pipe { public: + static const int READ_FILE_DESCRIPTOR_IDX; + static const int WRITE_FILE_DESCRIPTOR_IDX; Pipe(); ~Pipe(); void close(); int getReadFd() { - return p[0]; + return getPipeFileDescriptorIndex(READ_FILE_DESCRIPTOR_IDX); } int getWriteFd() { - return p[1]; + return getPipeFileDescriptorIndex(WRITE_FILE_DESCRIPTOR_IDX); + } + + int getPipeFileDescriptorIndex(int idx) { + return p[idx]; } bool isOpen() { @@ -47,5 +52,4 @@ class Pipe { }; - -#endif //UNTITLED_PIPE_H +#endif //ROCKS_NATIVE_PIPE_H diff --git a/hadoop-hdds/rocks-native/src/main/native/SSTDumpTool.cpp b/hadoop-hdds/rocks-native/src/main/native/SSTDumpTool.cpp index 903e372b8e83..0d40e23c6bf0 100644 --- a/hadoop-hdds/rocks-native/src/main/native/SSTDumpTool.cpp +++ b/hadoop-hdds/rocks-native/src/main/native/SSTDumpTool.cpp @@ -19,31 +19,31 @@ #include "org_apache_hadoop_hdds_utils_db_managed_ManagedSSTDumpTool.h" #include "rocksdb/options.h" #include "rocksdb/sst_dump_tool.h" -#include "string" +#include #include "cplusplus_to_java_convert.h" #include "Pipe.h" -#include "iostream" +#include -jint Java_org_apache_hadoop_hdds_utils_db_managed_ManagedSSTDumpTool_runInternal(JNIEnv *env, jobject obj, jobjectArray argsArray, -jlong optionsHandle, jlong pipeHandle) { +jint Java_org_apache_hadoop_hdds_utils_db_managed_ManagedSSTDumpTool_runInternal(JNIEnv *env, jobject obj, + jobjectArray argsArray, jlong optionsHandle, jlong pipeHandle) { ROCKSDB_NAMESPACE::SSTDumpTool dumpTool; - ROCKSDB_NAMESPACE::Options* options = reinterpret_cast(optionsHandle); - Pipe* pipe = reinterpret_cast(pipeHandle); + ROCKSDB_NAMESPACE::Options *options = reinterpret_cast(optionsHandle); + Pipe *pipe = reinterpret_cast(pipeHandle); int length = env->GetArrayLength(argsArray); - char* args[length + 1]; + char *args[length + 1]; args[0] = strdup("./sst_dump"); - for(int i = 0; i < length; i++) { + for (int i = 0; i < length; i++) { jstring str_val = (jstring)env->GetObjectArrayElement(argsArray, (jsize)i); - char* utf_str = (char*)env->GetStringUTFChars(str_val, JNI_FALSE); - args[i+1] = strdup(utf_str); + char *utf_str = (char *)env->GetStringUTFChars(str_val, JNI_FALSE); + args[i + 1] = strdup(utf_str); env->ReleaseStringUTFChars(str_val, utf_str); } - FILE* wr = fdopen(pipe->getWriteFd(), "w"); + FILE *wr = fdopen(pipe->getWriteFd(), "w"); int ret = dumpTool.Run(length + 1, args, *options, wr); - for (int i = 0; iclose(); return ret; -} \ No newline at end of file +} diff --git a/pom.xml b/pom.xml index 7851f65d4134..28d166728f60 100644 --- a/pom.xml +++ b/pom.xml @@ -298,7 +298,6 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xs 1.9.3 1.1.8 1.4.9 - @@ -1540,11 +1539,6 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xs curator-client ${curator.version} - - com.googlecode.maven-download-plugin - download-maven-plugin - ${download-maven-plugin.version} - @@ -1811,6 +1805,11 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xs docker-maven-plugin ${docker-maven-plugin.version} + + com.googlecode.maven-download-plugin + download-maven-plugin + ${download-maven-plugin.version} + From 02ccdc8c0e16fe8ca6e767550b0fa00e5cfc0f27 Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Sat, 4 Mar 2023 00:25:53 -0800 Subject: [PATCH 30/51] HDDS-8028: Add additional line --- hadoop-hdds/rocks-native/pom.xml | 4 +--- 1 file changed, 
1 insertion(+), 3 deletions(-) diff --git a/hadoop-hdds/rocks-native/pom.xml b/hadoop-hdds/rocks-native/pom.xml index bbd9d1c52832..8672e04abe9f 100644 --- a/hadoop-hdds/rocks-native/pom.xml +++ b/hadoop-hdds/rocks-native/pom.xml @@ -402,6 +402,4 @@ - - - \ No newline at end of file + From 77ef6c244cef22c91aa39a8824e0b57870065da5 Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Sat, 4 Mar 2023 10:29:33 -0800 Subject: [PATCH 31/51] HDDS-8028: Add license --- .../src/main/patches/rocks-native.patch | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/hadoop-hdds/rocks-native/src/main/patches/rocks-native.patch b/hadoop-hdds/rocks-native/src/main/patches/rocks-native.patch index 3b35136587ac..c58a9a12f124 100644 --- a/hadoop-hdds/rocks-native/src/main/patches/rocks-native.patch +++ b/hadoop-hdds/rocks-native/src/main/patches/rocks-native.patch @@ -1,3 +1,21 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + diff --git a/include/rocksdb/sst_dump_tool.h b/include/rocksdb/sst_dump_tool.h index 9261ba47d..09ed123e5 100644 --- a/include/rocksdb/sst_dump_tool.h From d42c6b23f99e4084d60ffcc9f3441e293e8d5faa Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Sat, 4 Mar 2023 10:34:24 -0800 Subject: [PATCH 32/51] HDDS-8028: Add rocks-native-jar to jar report --- .../dist/src/main/license/bin/LICENSE.txt | 2 +- .../dist/src/main/license/jar-report.txt | 39 ++++++++++--------- 2 files changed, 21 insertions(+), 20 deletions(-) diff --git a/hadoop-ozone/dist/src/main/license/bin/LICENSE.txt b/hadoop-ozone/dist/src/main/license/bin/LICENSE.txt index 98bef1a7dcfb..ded7ed07c453 100644 --- a/hadoop-ozone/dist/src/main/license/bin/LICENSE.txt +++ b/hadoop-ozone/dist/src/main/license/bin/LICENSE.txt @@ -509,7 +509,7 @@ WTFPL -------------------------------------------------------------------------------- -hdds-server-scm, ozone-manager, ozone-s3gateway and hdds-server-framework +hdds-server-scm, ozone-manager, ozone-s3gateway, hdds-rocks-native and hdds-server-framework contains the source of the following javascript/css components (See licenses/ for text of these licenses): Apache Software Foundation License 2.0 diff --git a/hadoop-ozone/dist/src/main/license/jar-report.txt b/hadoop-ozone/dist/src/main/license/jar-report.txt index 5f72a0c6a396..c0c3351e9f29 100644 --- a/hadoop-ozone/dist/src/main/license/jar-report.txt +++ b/hadoop-ozone/dist/src/main/license/jar-report.txt @@ -1,10 +1,11 @@ +share/ozone/lib/FastInfoset.jar share/ozone/lib/accessors-smart.jar share/ozone/lib/activation.jar share/ozone/lib/animal-sniffer-annotations.jar share/ozone/lib/annotations.jar share/ozone/lib/annotations.jar -share/ozone/lib/aopalliance.jar share/ozone/lib/aopalliance-repackaged.jar +share/ozone/lib/aopalliance.jar 
share/ozone/lib/asm.jar share/ozone/lib/aspectjrt.jar share/ozone/lib/aspectjweaver.jar @@ -26,8 +27,8 @@ share/ozone/lib/commons-configuration2.jar share/ozone/lib/commons-daemon.jar share/ozone/lib/commons-digester.jar share/ozone/lib/commons-io.jar -share/ozone/lib/commons-lang3.jar share/ozone/lib/commons-lang.jar +share/ozone/lib/commons-lang3.jar share/ozone/lib/commons-logging.jar share/ozone/lib/commons-net.jar share/ozone/lib/commons-pool2.jar @@ -38,22 +39,21 @@ share/ozone/lib/disruptor.jar share/ozone/lib/dnsjava.jar share/ozone/lib/error_prone_annotations.jar share/ozone/lib/failureaccess.jar -share/ozone/lib/FastInfoset.jar share/ozone/lib/gethostname4j.jar share/ozone/lib/grpc-api.jar share/ozone/lib/grpc-context.jar share/ozone/lib/grpc-core.jar share/ozone/lib/grpc-netty.jar -share/ozone/lib/grpc-protobuf.jar share/ozone/lib/grpc-protobuf-lite.jar +share/ozone/lib/grpc-protobuf.jar share/ozone/lib/grpc-stub.jar share/ozone/lib/gson.jar share/ozone/lib/guava-jre.jar share/ozone/lib/guice-assistedinject.jar share/ozone/lib/guice-bridge.jar -share/ozone/lib/guice.jar share/ozone/lib/guice-multibindings.jar share/ozone/lib/guice-servlet.jar +share/ozone/lib/guice.jar share/ozone/lib/hadoop-annotations.jar share/ozone/lib/hadoop-auth.jar share/ozone/lib/hadoop-common.jar @@ -74,6 +74,7 @@ share/ozone/lib/hdds-hadoop-dependency-server.jar share/ozone/lib/hdds-interface-admin.jar share/ozone/lib/hdds-interface-client.jar share/ozone/lib/hdds-interface-server.jar +share/ozone/lib/hdds-rocks-native.jar share/ozone/lib/hdds-server-framework.jar share/ozone/lib/hdds-server-scm.jar share/ozone/lib/hdds-tools.jar @@ -83,8 +84,8 @@ share/ozone/lib/hk2-utils.jar share/ozone/lib/hppc.jar share/ozone/lib/httpasyncclient.jar share/ozone/lib/httpclient.jar -share/ozone/lib/httpcore.jar share/ozone/lib/httpcore-nio.jar +share/ozone/lib/httpcore.jar share/ozone/lib/httpmime.jar share/ozone/lib/ion-java.jar share/ozone/lib/istack-commons-runtime.jar @@ -141,12 +142,12 @@ share/ozone/lib/jetty-util.jar share/ozone/lib/jetty-webapp.jar share/ozone/lib/jetty-xml.jar share/ozone/lib/jmespath-java.jar -share/ozone/lib/jna.jar share/ozone/lib/jna-platform.jar +share/ozone/lib/jna.jar share/ozone/lib/joda-time.jar share/ozone/lib/jooq-codegen.jar -share/ozone/lib/jooq.jar share/ozone/lib/jooq-meta.jar +share/ozone/lib/jooq.jar share/ozone/lib/jsch.jar share/ozone/lib/json-smart.jar share/ozone/lib/jsp-api.jar @@ -165,13 +166,13 @@ share/ozone/lib/log4j-core.jar share/ozone/lib/metainf-services.jar share/ozone/lib/metrics-core.jar share/ozone/lib/netty-buffer.Final.jar -share/ozone/lib/netty-codec.Final.jar -share/ozone/lib/netty-codec-http2.Final.jar share/ozone/lib/netty-codec-http.Final.jar +share/ozone/lib/netty-codec-http2.Final.jar share/ozone/lib/netty-codec-socks.Final.jar +share/ozone/lib/netty-codec.Final.jar share/ozone/lib/netty-common.Final.jar -share/ozone/lib/netty-handler.Final.jar share/ozone/lib/netty-handler-proxy.Final.jar +share/ozone/lib/netty-handler.Final.jar share/ozone/lib/netty-resolver.Final.jar share/ozone/lib/netty-tcnative-boringssl-static.Final-linux-aarch_64.jar share/ozone/lib/netty-tcnative-boringssl-static.Final-linux-x86_64.jar @@ -180,10 +181,10 @@ share/ozone/lib/netty-tcnative-boringssl-static.Final-osx-x86_64.jar share/ozone/lib/netty-tcnative-boringssl-static.Final-windows-x86_64.jar share/ozone/lib/netty-tcnative-boringssl-static.Final.jar share/ozone/lib/netty-tcnative-classes.Final.jar -share/ozone/lib/netty-transport.Final.jar 
share/ozone/lib/netty-transport-classes-epoll.Final.jar share/ozone/lib/netty-transport-native-epoll.Final-linux-x86_64.jar share/ozone/lib/netty-transport-native-unix-common.Final.jar +share/ozone/lib/netty-transport.Final.jar share/ozone/lib/nimbus-jose-jwt.jar share/ozone/lib/okhttp.jar share/ozone/lib/okio.jar @@ -199,23 +200,23 @@ share/ozone/lib/ozone-csi.jar share/ozone/lib/ozone-datanode.jar share/ozone/lib/ozone-filesystem-common.jar share/ozone/lib/ozone-filesystem-hadoop2.jar +share/ozone/lib/ozone-filesystem-hadoop3-client.jar share/ozone/lib/ozone-filesystem-hadoop3.jar share/ozone/lib/ozone-filesystem.jar -share/ozone/lib/ozone-filesystem-hadoop3-client.jar share/ozone/lib/ozone-insight.jar share/ozone/lib/ozone-interface-client.jar share/ozone/lib/ozone-interface-storage.jar share/ozone/lib/ozone-manager.jar -share/ozone/lib/ozone-reconcodegen.jar share/ozone/lib/ozone-recon.jar +share/ozone/lib/ozone-reconcodegen.jar share/ozone/lib/ozone-s3gateway.jar share/ozone/lib/ozone-tools.jar share/ozone/lib/perfmark-api.jar share/ozone/lib/picocli.jar +share/ozone/lib/proto-google-common-protos.jar +share/ozone/lib/protobuf-java-util.jar share/ozone/lib/protobuf-java.jar share/ozone/lib/protobuf-java.jar -share/ozone/lib/protobuf-java-util.jar -share/ozone/lib/proto-google-common-protos.jar share/ozone/lib/ranger-intg.jar share/ozone/lib/ranger-plugin-classloader.jar share/ozone/lib/ranger-plugins-audit.jar @@ -233,12 +234,12 @@ share/ozone/lib/ratis-thirdparty-misc.jar share/ozone/lib/ratis-tools.jar share/ozone/lib/re2j.jar share/ozone/lib/reflections.jar -share/ozone/lib/rocksdb-checkpoint-differ.jar share/ozone/lib/reload4j.jar +share/ozone/lib/rocksdb-checkpoint-differ.jar share/ozone/lib/rocksdbjni.jar +share/ozone/lib/simpleclient.jar share/ozone/lib/simpleclient_common.jar share/ozone/lib/simpleclient_dropwizard.jar -share/ozone/lib/simpleclient.jar share/ozone/lib/slf4j-api.jar share/ozone/lib/slf4j-reload4j.jar share/ozone/lib/snakeyaml.jar @@ -249,8 +250,8 @@ share/ozone/lib/spring-jcl.jar share/ozone/lib/spring-jdbc.jar share/ozone/lib/spring-tx.jar share/ozone/lib/sqlite-jdbc.jar -share/ozone/lib/stax2-api.jar share/ozone/lib/stax-ex.jar +share/ozone/lib/stax2-api.jar share/ozone/lib/txw2.jar share/ozone/lib/weld-servlet-shaded.Final.jar share/ozone/lib/woodstox-core.jar From 4d277833e5286e96a47eb83f43b311daaf6768f4 Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Sat, 4 Mar 2023 10:41:44 -0800 Subject: [PATCH 33/51] HDDS-8028: Revert import order change --- .../hadoop/hdds/utils/HddsServerUtil.java | 45 ++++++++++--------- 1 file changed, 23 insertions(+), 22 deletions(-) diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java index 68423e608574..33d8c178c72b 100644 --- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java +++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java @@ -17,6 +17,21 @@ package org.apache.hadoop.hdds.utils; +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.net.InetSocketAddress; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Optional; +import java.util.OptionalInt; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.Stream; + 
import com.google.common.base.Strings; import com.google.protobuf.BlockingService; import org.apache.commons.compress.archivers.ArchiveEntry; @@ -48,24 +63,6 @@ import org.apache.hadoop.ozone.OzoneConfigKeys; import org.apache.hadoop.ozone.OzoneConsts; import org.apache.hadoop.security.UserGroupInformation; -import org.rocksdb.RocksDBException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.OutputStream; -import java.net.InetSocketAddress; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Optional; -import java.util.OptionalInt; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; -import java.util.stream.Stream; import static org.apache.hadoop.hdds.DFSConfigKeysLegacy.DFS_DATANODE_DATA_DIR_KEY; import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_HEARTBEAT_INTERVAL; @@ -81,19 +78,23 @@ import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_HEARTBEAT_LOG_WARN_DEFAULT; import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_HEARTBEAT_LOG_WARN_INTERVAL_COUNT; import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_HEARTBEAT_RPC_TIMEOUT; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_HEARTBEAT_RPC_TIMEOUT_DEFAULT; import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_HEARTBEAT_RPC_RETRY_COUNT; import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_HEARTBEAT_RPC_RETRY_COUNT_DEFAULT; import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_HEARTBEAT_RPC_RETRY_INTERVAL; import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_HEARTBEAT_RPC_RETRY_INTERVAL_DEFAULT; -import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_HEARTBEAT_RPC_TIMEOUT; -import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_HEARTBEAT_RPC_TIMEOUT_DEFAULT; -import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_INFO_WAIT_DURATION; -import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_INFO_WAIT_DURATION_DEFAULT; import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_STALENODE_INTERVAL; import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_STALENODE_INTERVAL_DEFAULT; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_INFO_WAIT_DURATION; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_INFO_WAIT_DURATION_DEFAULT; import static org.apache.hadoop.hdds.server.ServerUtils.sanitizeUserArgs; import static org.apache.hadoop.ozone.OzoneConfigKeys.HDDS_DATANODE_CONTAINER_DB_DIR; +import org.rocksdb.RocksDBException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + /** * Hdds stateless helper functions for server side components. 
*/ From ada8e92cde28738c20d15aed0b60e1104a956f22 Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Sat, 4 Mar 2023 11:42:28 -0800 Subject: [PATCH 34/51] HDDS-8028: Remove last \n character from value --- .../db/managed/ManagedSSTDumpIterator.java | 22 +++++++++++++++++-- 1 file changed, 20 insertions(+), 2 deletions(-) diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java index acfa32ecb607..4c46e9f4f175 100644 --- a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java @@ -26,6 +26,7 @@ import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; import java.util.Iterator; +import java.util.concurrent.ForkJoinPool; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; @@ -143,7 +144,8 @@ public KeyValue next() { int numberOfCharsRead = processOutput.read(charBuffer); if (numberOfCharsRead < 0) { if (currentKey != null) { - currentKey.setValue(stdoutString.toString()); + currentKey.setValue(stdoutString.substring(0, + Math.max(stdoutString.length() -1, 0))); } return currentKey; } @@ -155,7 +157,7 @@ public KeyValue next() { } if (currentKey != null) { currentKey.setValue(stdoutString.substring(prevMatchEndIndex, - currentMatcher.start())); + currentMatcher.start() -1)); } prevMatchEndIndex = currentMatcher.end(); nextKey = new KeyValue( @@ -235,4 +237,20 @@ public String toString() { '}'; } } + + public static void main(String[] args) throws NativeLibraryNotLoadedException, IOException { + ManagedSSTDumpTool sstDumpTool = + new ManagedSSTDumpTool(new ForkJoinPool(), 50); + try (ManagedOptions options = new ManagedOptions(); + ManagedSSTDumpIterator iterator = new ManagedSSTDumpIterator(sstDumpTool, + "/Users/sbalachandran/Documents/code/dummyrocks/rocks/000025.sst", options, 2000); + ) { + while (iterator.hasNext()) { + System.out.println(iterator.next()); + } + } catch (Exception e) { + throw new RuntimeException(e); + } + + } } From 32abb5bc1e82ed575d3e2c4364587fd6add19747 Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Sat, 4 Mar 2023 11:43:19 -0800 Subject: [PATCH 35/51] HDDS-8028: Remove main function --- .../utils/db/managed/ManagedSSTDumpIterator.java | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java index 4c46e9f4f175..cec62ae752b9 100644 --- a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java @@ -237,20 +237,4 @@ public String toString() { '}'; } } - - public static void main(String[] args) throws NativeLibraryNotLoadedException, IOException { - ManagedSSTDumpTool sstDumpTool = - new ManagedSSTDumpTool(new ForkJoinPool(), 50); - try (ManagedOptions options = new ManagedOptions(); - ManagedSSTDumpIterator iterator = new ManagedSSTDumpIterator(sstDumpTool, - "/Users/sbalachandran/Documents/code/dummyrocks/rocks/000025.sst", 
options, 2000); - ) { - while (iterator.hasNext()) { - System.out.println(iterator.next()); - } - } catch (Exception e) { - throw new RuntimeException(e); - } - - } } From 2e04b1aeacf496a3bc453f45e0f0e2b82de8f64f Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Sat, 4 Mar 2023 12:10:11 -0800 Subject: [PATCH 36/51] HDDS-8028: Fix checkstyle issue --- .../hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java index cec62ae752b9..8056c3ac0293 100644 --- a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java @@ -26,7 +26,6 @@ import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; import java.util.Iterator; -import java.util.concurrent.ForkJoinPool; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; @@ -145,7 +144,7 @@ public KeyValue next() { if (numberOfCharsRead < 0) { if (currentKey != null) { currentKey.setValue(stdoutString.substring(0, - Math.max(stdoutString.length() -1, 0))); + Math.max(stdoutString.length() - 1, 0))); } return currentKey; } @@ -157,7 +156,7 @@ public KeyValue next() { } if (currentKey != null) { currentKey.setValue(stdoutString.substring(prevMatchEndIndex, - currentMatcher.start() -1)); + currentMatcher.start() - 1)); } prevMatchEndIndex = currentMatcher.end(); nextKey = new KeyValue( From 88c9b4a545ddc9e3227600328fcd255fdfcced6b Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Tue, 7 Mar 2023 23:44:22 -0800 Subject: [PATCH 37/51] HDDS-8028: Change name --- hadoop-hdds/rocks-native/pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/hadoop-hdds/rocks-native/pom.xml b/hadoop-hdds/rocks-native/pom.xml index 8672e04abe9f..2d08d9e44445 100644 --- a/hadoop-hdds/rocks-native/pom.xml +++ b/hadoop-hdds/rocks-native/pom.xml @@ -21,13 +21,13 @@ 1.4.0-SNAPSHOT 4.0.0 - Apache Ozone HDDS Rocks + Apache Ozone HDDS RocksDB Tools hdds-rocks-native org.apache.ozone - hdds-common + hdds-managed-rocksdb org.eclipse.jetty From f6260841a533caf952e3ab08a8c1d7381a5e1471 Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Wed, 8 Mar 2023 00:51:03 -0800 Subject: [PATCH 38/51] HDDS-8028: Address review comments --- .../hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java index 8056c3ac0293..7ff877a8424a 100644 --- a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java @@ -40,7 +40,7 @@ public class ManagedSSTDumpIterator implements private static final String SST_DUMP_TOOL_CLASS = "org.apache.hadoop.hdds.utils.db.managed.ManagedSSTDumpTool"; private static final String PATTERN_REGEX = - "'([^=>]+)' seq:([0-9]+), type:([0-9]+) =>"; + 
"'([^=>]+)' seq:([0-9]+), type:([0-9]+) => "; public static final int PATTERN_KEY_GROUP_NUMBER = 1; public static final int PATTERN_SEQ_GROUP_NUMBER = 2; From 7cd5c52c1d0396c12101ed61022b777ac2b85641 Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Wed, 8 Mar 2023 12:17:01 -0800 Subject: [PATCH 39/51] HDDS-8028: Use const variable in Pipe closing --- hadoop-hdds/rocks-native/src/main/native/Pipe.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/hadoop-hdds/rocks-native/src/main/native/Pipe.cpp b/hadoop-hdds/rocks-native/src/main/native/Pipe.cpp index 4a42d35af346..f1dd54438700 100644 --- a/hadoop-hdds/rocks-native/src/main/native/Pipe.cpp +++ b/hadoop-hdds/rocks-native/src/main/native/Pipe.cpp @@ -28,8 +28,8 @@ Pipe::Pipe() { } Pipe::~Pipe() { - ::close(p[0]); - ::close(p[1]); + ::close(p[Pipe::READ_FILE_DESCRIPTOR_IDX]); + ::close(p[Pipe::WRITE_FILE_DESCRIPTOR_IDX]); } void Pipe::close() { From 18c3afae649efc676ecb78326f24f0981c9b3865 Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Wed, 8 Mar 2023 12:39:50 -0800 Subject: [PATCH 40/51] HDDS-8028: Fix checkstyle issues --- .../utils/db/managed/PipeInputStream.java | 1 + .../src/main/native/PipeInputStream.cpp | 8 +++---- .../main/native/cplusplus_to_java_convert.h | 21 ++++--------------- 3 files changed, 9 insertions(+), 21 deletions(-) diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/PipeInputStream.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/PipeInputStream.java index ca79d417a0b8..adb108ae2d01 100644 --- a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/PipeInputStream.java +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/PipeInputStream.java @@ -82,6 +82,7 @@ public int read() { private native int readInternal(byte[] buff, int numberOfBytes, long pipeHandle); + private native void closeInternal(long pipeHandle); @Override diff --git a/hadoop-hdds/rocks-native/src/main/native/PipeInputStream.cpp b/hadoop-hdds/rocks-native/src/main/native/PipeInputStream.cpp index ce865cbd35b4..27a8fe6d1a61 100644 --- a/hadoop-hdds/rocks-native/src/main/native/PipeInputStream.cpp +++ b/hadoop-hdds/rocks-native/src/main/native/PipeInputStream.cpp @@ -24,14 +24,14 @@ jlong Java_org_apache_hadoop_hdds_utils_db_managed_PipeInputStream_newPipe(JNIEnv *, jobject) { - Pipe* pipe = new Pipe(); + Pipe *pipe = new Pipe(); return GET_CPLUSPLUS_POINTER(pipe); } jint Java_org_apache_hadoop_hdds_utils_db_managed_PipeInputStream_readInternal(JNIEnv *env, jobject object, jbyteArray jbyteArray, jint capacity, jlong nativeHandle) { int cap_int = capacity; - Pipe* pipe = reinterpret_cast(nativeHandle); - jbyte* b = (env)->GetByteArrayElements(jbyteArray, JNI_FALSE); + Pipe *pipe = reinterpret_cast(nativeHandle); + jbyte *b = (env)->GetByteArrayElements(jbyteArray, JNI_FALSE); cap_int = read(pipe->getReadFd(), b, cap_int); if (cap_int == 0) { if (!pipe->isOpen()) { @@ -43,6 +43,6 @@ jint Java_org_apache_hadoop_hdds_utils_db_managed_PipeInputStream_readInternal(J } void Java_org_apache_hadoop_hdds_utils_db_managed_PipeInputStream_closeInternal(JNIEnv *env, jobject object, jlong nativeHandle) { - delete reinterpret_cast(nativeHandle); + delete reinterpret_cast(nativeHandle); } diff --git a/hadoop-hdds/rocks-native/src/main/native/cplusplus_to_java_convert.h b/hadoop-hdds/rocks-native/src/main/native/cplusplus_to_java_convert.h index 85f054e6212d..0eea6fa2c495 100644 --- 
a/hadoop-hdds/rocks-native/src/main/native/cplusplus_to_java_convert.h +++ b/hadoop-hdds/rocks-native/src/main/native/cplusplus_to_java_convert.h @@ -1,20 +1,7 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ +// Copyright (c) 2011-present, Facebook, Inc. All rights reserved. +// This source code is licensed under both the GPLv2 (found in the +// COPYING file in the root directory) and Apache 2.0 License +// (found in the LICENSE.Apache file in the root directory). #pragma once From 07aec354df526b8924286f0a7883351b1ce0c6b3 Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Wed, 8 Mar 2023 12:42:57 -0800 Subject: [PATCH 41/51] HDDS-8028: Fix javadoc --- .../hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java index 7ff877a8424a..99582bda0b33 100644 --- a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java @@ -108,8 +108,8 @@ private void checkSanityOfProcess() { } /** - * - * @return + * Checks the status of the process & sees if there is another record. + * @return True if next exists & false otherwise * Throws Runtime Exception in case of SST File read failure */ From 0c493837ed77a694457e35cc0604707a19fad3d7 Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Wed, 8 Mar 2023 12:44:59 -0800 Subject: [PATCH 42/51] HDDS-8028: Fix javadoc --- .../hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java index 99582bda0b33..7833e0d55a37 100644 --- a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java @@ -120,7 +120,7 @@ public boolean hasNext() { } /** - * + * Returns the next record from SSTDumpTool * @return next Key * Throws Runtime Exception incase of failure. 
*/ From 16dadc5993430e68601d678c57e7681124e717ee Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Wed, 8 Mar 2023 18:22:31 -0800 Subject: [PATCH 43/51] HDDS-8028: Fix exception handling & garbage collection --- .../hdds/utils/NativeLibraryLoader.java | 30 +++++++++++-------- .../db/managed/ManagedSSTDumpIterator.java | 1 + .../utils/db/managed/ManagedSSTDumpTool.java | 3 +- .../utils/db/managed/PipeInputStream.java | 3 +- .../src/main/native/SSTDumpTool.cpp | 10 +++---- 5 files changed, 26 insertions(+), 21 deletions(-) diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/NativeLibraryLoader.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/NativeLibraryLoader.java index 565c5b93af32..19c7c166c0b1 100644 --- a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/NativeLibraryLoader.java +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/NativeLibraryLoader.java @@ -18,6 +18,9 @@ package org.apache.hadoop.hdds.utils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.io.File; import java.io.IOException; import java.io.InputStream; @@ -31,6 +34,9 @@ * Class to load Native Libraries. */ public class NativeLibraryLoader { + + private static final Logger LOG = + LoggerFactory.getLogger(NativeLibraryLoader.class); private static final String OS = System.getProperty("os.name").toLowerCase(); private Map librariesLoaded; private static volatile NativeLibraryLoader instance; @@ -83,8 +89,9 @@ private static String appendLibOsSuffix(String libraryFileName) { return libraryFileName + getLibOsSuffix(); } - public boolean isLibraryLoaded(final String libraryName) { - return librariesLoaded.getOrDefault(libraryName, false); + public static boolean isLibraryLoaded(final String libraryName) { + return getInstance().librariesLoaded + .getOrDefault(libraryName, false); } public synchronized boolean loadLibrary(final String libraryName) { @@ -93,23 +100,22 @@ public synchronized boolean loadLibrary(final String libraryName) { } boolean loaded = false; try { - System.loadLibrary(libraryName); - loaded = true; - } catch (final UnsatisfiedLinkError ule) { - - } - if (!loaded) { + loaded = false; try { + System.loadLibrary(libraryName); + loaded = true; + } catch (Exception e) { + + } + if (!loaded) { Optional file = copyResourceFromJarToTemp(libraryName); if (file.isPresent()) { System.load(file.get().getAbsolutePath()); loaded = true; } - - } catch (IOException e) { - } - + } catch (Exception e) { + LOG.warn("Unable to load library: {}", libraryName, e); } this.librariesLoaded.put(libraryName, loaded); return isLibraryLoaded(libraryName); diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java index 7833e0d55a37..a148dbf06a8e 100644 --- a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java @@ -26,6 +26,7 @@ import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; import java.util.Iterator; +import java.util.concurrent.ForkJoinPool; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; diff --git 
a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpTool.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpTool.java index 45f246521e8f..940438a45764 100644 --- a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpTool.java +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpTool.java @@ -43,8 +43,7 @@ public class ManagedSSTDumpTool { public ManagedSSTDumpTool(ExecutorService executorService, int bufferCapacity) throws NativeLibraryNotLoadedException { - if (!NativeLibraryLoader.getInstance() - .isLibraryLoaded(ROCKS_TOOLS_NATIVE_LIBRARY_NAME)) { + if (!NativeLibraryLoader.isLibraryLoaded(ROCKS_TOOLS_NATIVE_LIBRARY_NAME)) { throw new NativeLibraryNotLoadedException( ROCKS_TOOLS_NATIVE_LIBRARY_NAME); } diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/PipeInputStream.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/PipeInputStream.java index adb108ae2d01..741761ae49db 100644 --- a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/PipeInputStream.java +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/PipeInputStream.java @@ -43,8 +43,7 @@ public class PipeInputStream extends InputStream { private AtomicBoolean cleanup; PipeInputStream(int capacity) throws NativeLibraryNotLoadedException { - if (!NativeLibraryLoader.getInstance() - .isLibraryLoaded(ROCKS_TOOLS_NATIVE_LIBRARY_NAME)) { + if (!NativeLibraryLoader.isLibraryLoaded(ROCKS_TOOLS_NATIVE_LIBRARY_NAME)) { throw new NativeLibraryNotLoadedException( ROCKS_TOOLS_NATIVE_LIBRARY_NAME); } diff --git a/hadoop-hdds/rocks-native/src/main/native/SSTDumpTool.cpp b/hadoop-hdds/rocks-native/src/main/native/SSTDumpTool.cpp index 0d40e23c6bf0..2c551975629d 100644 --- a/hadoop-hdds/rocks-native/src/main/native/SSTDumpTool.cpp +++ b/hadoop-hdds/rocks-native/src/main/native/SSTDumpTool.cpp @@ -31,17 +31,17 @@ jint Java_org_apache_hadoop_hdds_utils_db_managed_ManagedSSTDumpTool_runInternal Pipe *pipe = reinterpret_cast(pipeHandle); int length = env->GetArrayLength(argsArray); char *args[length + 1]; - args[0] = strdup("./sst_dump"); + args[0] = ""; for (int i = 0; i < length; i++) { jstring str_val = (jstring)env->GetObjectArrayElement(argsArray, (jsize)i); char *utf_str = (char *)env->GetStringUTFChars(str_val, JNI_FALSE); - args[i + 1] = strdup(utf_str); - env->ReleaseStringUTFChars(str_val, utf_str); + args[i + 1] = utf_str; } FILE *wr = fdopen(pipe->getWriteFd(), "w"); int ret = dumpTool.Run(length + 1, args, *options, wr); - for (int i = 0; i < length + 1; i++) { - free(args[i]); + for (int i = 1; i < length + 1; i++) { + jstring str_val = (jstring)env->GetObjectArrayElement(argsArray, (jsize)(i - 1)); + env->ReleaseStringUTFChars(str_val, args[i]); } fclose(wr); pipe->close(); From 1a42d5c6baedbf2b1a0d0d7867ab8de807c9f206 Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Wed, 8 Mar 2023 18:23:41 -0800 Subject: [PATCH 44/51] HDDS-8028: Remove unused import --- .../hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java | 1 - 1 file changed, 1 deletion(-) diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java index a148dbf06a8e..7833e0d55a37 100644 
--- a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java @@ -26,7 +26,6 @@ import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; import java.util.Iterator; -import java.util.concurrent.ForkJoinPool; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; From ba5de3aa34d35334232be85773a46d8e9b4731e7 Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Thu, 9 Mar 2023 05:56:24 -0800 Subject: [PATCH 45/51] HDDS-8028: Fix exception handling --- .../org/apache/hadoop/hdds/utils/NativeLibraryLoader.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/NativeLibraryLoader.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/NativeLibraryLoader.java index 19c7c166c0b1..35427c822fd4 100644 --- a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/NativeLibraryLoader.java +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/NativeLibraryLoader.java @@ -104,7 +104,7 @@ public synchronized boolean loadLibrary(final String libraryName) { try { System.loadLibrary(libraryName); loaded = true; - } catch (Exception e) { + } catch (Throwable e) { } if (!loaded) { @@ -114,7 +114,7 @@ public synchronized boolean loadLibrary(final String libraryName) { loaded = true; } } - } catch (Exception e) { + } catch (Throwable e) { LOG.warn("Unable to load library: {}", libraryName, e); } this.librariesLoaded.put(libraryName, loaded); From 23d37351999f5067a0dc749b3cab1ce0c6311e19 Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Thu, 9 Mar 2023 10:03:53 -0800 Subject: [PATCH 46/51] HDDS-8028: Fix checkstyle --- .../hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java index 7833e0d55a37..e1f4758a765d 100644 --- a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java @@ -120,7 +120,7 @@ public boolean hasNext() { } /** - * Returns the next record from SSTDumpTool + * Returns the next record from SSTDumpTool. * @return next Key * Throws Runtime Exception incase of failure. */ From fc6993b3bd6affaff7c67bbf94ee6a344adfa8a2 Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Thu, 9 Mar 2023 10:15:44 -0800 Subject: [PATCH 47/51] HDDS-8028: Fix checkstyle --- .../main/native/cplusplus_to_java_convert.h | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/hadoop-hdds/rocks-native/src/main/native/cplusplus_to_java_convert.h b/hadoop-hdds/rocks-native/src/main/native/cplusplus_to_java_convert.h index 0eea6fa2c495..efe9d4a5be24 100644 --- a/hadoop-hdds/rocks-native/src/main/native/cplusplus_to_java_convert.h +++ b/hadoop-hdds/rocks-native/src/main/native/cplusplus_to_java_convert.h @@ -1,3 +1,21 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + // Copyright (c) 2011-present, Facebook, Inc. All rights reserved. // This source code is licensed under both the GPLv2 (found in the // COPYING file in the root directory) and Apache 2.0 License From 9250a4db5fa0a5f086ec147641de6cdbfc47673a Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Thu, 9 Mar 2023 17:05:02 -0800 Subject: [PATCH 48/51] HDDS-8028: Address review comments --- hadoop-hdds/rocks-native/pom.xml | 2 +- .../db/managed/ManagedSSTDumpIterator.java | 92 ++++++++----------- .../src/main/native/SSTDumpTool.cpp | 1 - 3 files changed, 38 insertions(+), 57 deletions(-) diff --git a/hadoop-hdds/rocks-native/pom.xml b/hadoop-hdds/rocks-native/pom.xml index 2d08d9e44445..7d9518d7a5b0 100644 --- a/hadoop-hdds/rocks-native/pom.xml +++ b/hadoop-hdds/rocks-native/pom.xml @@ -260,7 +260,7 @@ - + diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java index e1f4758a765d..2653d7de9552 100644 --- a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java @@ -27,8 +27,6 @@ import java.nio.charset.StandardCharsets; import java.util.Iterator; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.locks.Lock; -import java.util.concurrent.locks.ReentrantLock; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -56,24 +54,19 @@ public class ManagedSSTDumpIterator implements private char[] charBuffer; private KeyValue nextKey; - private long pollIntervalMillis; private ManagedSSTDumpTool.SSTDumpToolTask sstDumpToolTask; - private Lock lock; private AtomicBoolean open; public ManagedSSTDumpIterator(ManagedSSTDumpTool sstDumpTool, String sstFilePath, - ManagedOptions options, - long pollIntervalMillis) throws IOException, + ManagedOptions options) throws IOException, NativeLibraryNotLoadedException { File sstFile = new File(sstFilePath); if (!sstFile.exists() || !sstFile.isFile()) { throw new IOException(String.format("Invalid SST File Path : %s", sstFile.getAbsolutePath())); } - this.pollIntervalMillis = pollIntervalMillis; - this.lock = new ReentrantLock(); init(sstDumpTool, sstFile, options); } @@ -126,63 +119,52 @@ public boolean hasNext() { */ @Override public KeyValue next() { - lock.lock(); - try { - checkSanityOfProcess(); - currentKey = nextKey; - nextKey = null; - while (!currentMatcher.find()) { - try { - if (prevMatchEndIndex != 0) { - stdoutString = new StringBuilder(stdoutString.substring( - prevMatchEndIndex, stdoutString.length())); - prevMatchEndIndex = 0; - currentMatcher = 
PATTERN_MATCHER.matcher(stdoutString); - } - Thread.sleep(pollIntervalMillis); - int numberOfCharsRead = processOutput.read(charBuffer); - if (numberOfCharsRead < 0) { - if (currentKey != null) { - currentKey.setValue(stdoutString.substring(0, - Math.max(stdoutString.length() - 1, 0))); - } - return currentKey; + checkSanityOfProcess(); + currentKey = nextKey; + nextKey = null; + while (!currentMatcher.find()) { + try { + if (prevMatchEndIndex != 0) { + stdoutString = new StringBuilder(stdoutString.substring( + prevMatchEndIndex, stdoutString.length())); + prevMatchEndIndex = 0; + currentMatcher = PATTERN_MATCHER.matcher(stdoutString); + } + int numberOfCharsRead = processOutput.read(charBuffer); + if (numberOfCharsRead < 0) { + if (currentKey != null) { + currentKey.setValue(stdoutString.substring(0, + Math.max(stdoutString.length() - 1, 0))); } - stdoutString.append(charBuffer, 0, numberOfCharsRead); - currentMatcher.reset(); - } catch (IOException | InterruptedException e) { - throw new RuntimeIOException(e); + return currentKey; } + stdoutString.append(charBuffer, 0, numberOfCharsRead); + currentMatcher.reset(); + } catch (IOException e) { + throw new RuntimeIOException(e); } - if (currentKey != null) { - currentKey.setValue(stdoutString.substring(prevMatchEndIndex, - currentMatcher.start() - 1)); - } - prevMatchEndIndex = currentMatcher.end(); - nextKey = new KeyValue( - currentMatcher.group(PATTERN_KEY_GROUP_NUMBER), - currentMatcher.group(PATTERN_SEQ_GROUP_NUMBER), - currentMatcher.group(PATTERN_TYPE_GROUP_NUMBER)); - return currentKey; - } finally { - lock.unlock(); } + if (currentKey != null) { + currentKey.setValue(stdoutString.substring(prevMatchEndIndex, + currentMatcher.start() - 1)); + } + prevMatchEndIndex = currentMatcher.end(); + nextKey = new KeyValue( + currentMatcher.group(PATTERN_KEY_GROUP_NUMBER), + currentMatcher.group(PATTERN_SEQ_GROUP_NUMBER), + currentMatcher.group(PATTERN_TYPE_GROUP_NUMBER)); + return currentKey; } @Override public synchronized void close() throws Exception { - lock.lock(); - try { - if (this.sstDumpToolTask != null) { - if (!this.sstDumpToolTask.getFuture().isDone()) { - this.sstDumpToolTask.getFuture().cancel(true); - } - this.processOutput.close(); + if (this.sstDumpToolTask != null) { + if (!this.sstDumpToolTask.getFuture().isDone()) { + this.sstDumpToolTask.getFuture().cancel(true); } - open.compareAndSet(true, false); - } finally { - lock.unlock(); + this.processOutput.close(); } + open.compareAndSet(true, false); } @Override diff --git a/hadoop-hdds/rocks-native/src/main/native/SSTDumpTool.cpp b/hadoop-hdds/rocks-native/src/main/native/SSTDumpTool.cpp index 2c551975629d..afb674a50abc 100644 --- a/hadoop-hdds/rocks-native/src/main/native/SSTDumpTool.cpp +++ b/hadoop-hdds/rocks-native/src/main/native/SSTDumpTool.cpp @@ -31,7 +31,6 @@ jint Java_org_apache_hadoop_hdds_utils_db_managed_ManagedSSTDumpTool_runInternal Pipe *pipe = reinterpret_cast(pipeHandle); int length = env->GetArrayLength(argsArray); char *args[length + 1]; - args[0] = ""; for (int i = 0; i < length; i++) { jstring str_val = (jstring)env->GetObjectArrayElement(argsArray, (jsize)i); char *utf_str = (char *)env->GetStringUTFChars(str_val, JNI_FALSE); From 80019b9af2f79590e019e992e3820c66167eb29e Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Thu, 9 Mar 2023 18:35:15 -0800 Subject: [PATCH 49/51] HDDS-8028: Address review comments --- .../hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java | 2 +- hadoop-hdds/rocks-native/src/main/native/PipeInputStream.cpp | 2 
+- hadoop-hdds/rocks-native/src/main/native/SSTDumpTool.cpp | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java index 2653d7de9552..33088239a71b 100644 --- a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java @@ -149,7 +149,7 @@ public KeyValue next() { currentMatcher.start() - 1)); } prevMatchEndIndex = currentMatcher.end(); - nextKey = new KeyValue( + nextKey = new KeyValue( currentMatcher.group(PATTERN_KEY_GROUP_NUMBER), currentMatcher.group(PATTERN_SEQ_GROUP_NUMBER), currentMatcher.group(PATTERN_TYPE_GROUP_NUMBER)); diff --git a/hadoop-hdds/rocks-native/src/main/native/PipeInputStream.cpp b/hadoop-hdds/rocks-native/src/main/native/PipeInputStream.cpp index 27a8fe6d1a61..53f60cdd65af 100644 --- a/hadoop-hdds/rocks-native/src/main/native/PipeInputStream.cpp +++ b/hadoop-hdds/rocks-native/src/main/native/PipeInputStream.cpp @@ -38,7 +38,7 @@ jint Java_org_apache_hadoop_hdds_utils_db_managed_PipeInputStream_readInternal(J cap_int = -1; } } - (env)->ReleaseByteArrayElements(jbyteArray, b, 0); + env->ReleaseByteArrayElements(jbyteArray, b, 0); return cap_int; } diff --git a/hadoop-hdds/rocks-native/src/main/native/SSTDumpTool.cpp b/hadoop-hdds/rocks-native/src/main/native/SSTDumpTool.cpp index afb674a50abc..b200f49c0b06 100644 --- a/hadoop-hdds/rocks-native/src/main/native/SSTDumpTool.cpp +++ b/hadoop-hdds/rocks-native/src/main/native/SSTDumpTool.cpp @@ -25,7 +25,7 @@ #include jint Java_org_apache_hadoop_hdds_utils_db_managed_ManagedSSTDumpTool_runInternal(JNIEnv *env, jobject obj, - jobjectArray argsArray, jlong optionsHandle, jlong pipeHandle) { + jobjectArray argsArray, jlong optionsHandle, jlong pipeHandle) { ROCKSDB_NAMESPACE::SSTDumpTool dumpTool; ROCKSDB_NAMESPACE::Options *options = reinterpret_cast(optionsHandle); Pipe *pipe = reinterpret_cast(pipeHandle); From dbc31173a2982ea8d96a66c64aab9d21f4b4104b Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Fri, 10 Mar 2023 11:56:42 -0800 Subject: [PATCH 50/51] HDDS-8028: Address review comments --- hadoop-hdds/rocks-native/pom.xml | 18 +++++++++++++++++- .../db/managed/ManagedSSTDumpIterator.java | 8 ++++++-- 2 files changed, 23 insertions(+), 3 deletions(-) diff --git a/hadoop-hdds/rocks-native/pom.xml b/hadoop-hdds/rocks-native/pom.xml index 7d9518d7a5b0..6112326cc7cd 100644 --- a/hadoop-hdds/rocks-native/pom.xml +++ b/hadoop-hdds/rocks-native/pom.xml @@ -53,6 +53,22 @@ + + org.codehaus.mojo + build-helper-maven-plugin + + + get-cpu-count + generate-sources + + cpu-count + + + system.numCores + + + + com.googlecode.maven-download-plugin download-maven-plugin @@ -260,7 +276,7 @@ - + diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java index 33088239a71b..35aaeb33b0a8 100644 --- a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java +++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedSSTDumpIterator.java @@ -63,8 +63,12 @@ public 
ManagedSSTDumpIterator(ManagedSSTDumpTool sstDumpTool, ManagedOptions options) throws IOException, NativeLibraryNotLoadedException { File sstFile = new File(sstFilePath); - if (!sstFile.exists() || !sstFile.isFile()) { - throw new IOException(String.format("Invalid SST File Path : %s", + if (!sstFile.exists()) { + throw new IOException(String.format("File in path : %s doesn't exist", + sstFile.getAbsolutePath())); + } + if (!sstFile.isFile()) { + throw new IOException(String.format("Path given: %s is not a file", sstFile.getAbsolutePath())); } init(sstDumpTool, sstFile, options); From db3300d6caf9ca468214e081152f1ce1ce1bc7a9 Mon Sep 17 00:00:00 2001 From: Swaminathan Balachandran Date: Fri, 10 Mar 2023 12:13:46 -0800 Subject: [PATCH 51/51] HDDS-8028: Address review comments --- hadoop-ozone/common/pom.xml | 4 ---- 1 file changed, 4 deletions(-) diff --git a/hadoop-ozone/common/pom.xml b/hadoop-ozone/common/pom.xml index 0302519b5863..b3ebf71fc016 100644 --- a/hadoop-ozone/common/pom.xml +++ b/hadoop-ozone/common/pom.xml @@ -107,10 +107,6 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd"> org.junit.jupiter junit-jupiter-params - - org.apache.ozone - hdds-server-framework -
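A note on the parsing contract implied by PATTERN_REGEX in this series: sst_dump's scan output is carved into records by matching the "'<key>' seq:<n>, type:<n> => " header (the trailing space was added in PATCH 38), and each record's value is the text between the end of its own header match and the start of the next one, minus the trailing newline -- which is what the substring(..., currentMatcher.start() - 1) and stdoutString.length() - 1 arithmetic in next() implements. A small self-contained sketch of that matching, with sample lines fabricated to fit the regex rather than captured from a real sst_dump run:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public final class PatternSketch {
      public static void main(String[] args) {
        // Same pattern string as ManagedSSTDumpIterator.PATTERN_REGEX.
        Pattern p =
            Pattern.compile("'([^=>]+)' seq:([0-9]+), type:([0-9]+) => ");
        // Two fabricated records in the shape the regex expects.
        String out = "'key1' seq:101, type:1 => value1\n"
            + "'key2' seq:102, type:1 => value2\n";
        Matcher m = p.matcher(out);
        while (m.find()) {
          // Group numbers correspond to PATTERN_KEY_GROUP_NUMBER (1),
          // PATTERN_SEQ_GROUP_NUMBER (2), PATTERN_TYPE_GROUP_NUMBER (3).
          System.out.println("key=" + m.group(1)
              + " seq=" + m.group(2) + " type=" + m.group(3));
        }
      }
    }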
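For quick reference, a minimal usage sketch of the iterator API as it stands at the end of this series -- essentially the scratch main() that PATCH 35 removed, updated for the final constructor signature after PATCH 48 dropped the poll-interval argument. The wrapper class name, SST file path, thread pool, and buffer capacity are illustrative placeholders, not values prescribed by the patches:

    import org.apache.hadoop.hdds.utils.NativeLibraryNotLoadedException;
    import org.apache.hadoop.hdds.utils.db.managed.ManagedOptions;
    import org.apache.hadoop.hdds.utils.db.managed.ManagedSSTDumpIterator;
    import org.apache.hadoop.hdds.utils.db.managed.ManagedSSTDumpTool;

    import java.util.concurrent.ForkJoinPool;

    public final class SSTDumpExample {
      public static void main(String[] args)
          throws NativeLibraryNotLoadedException {
        // Throws NativeLibraryNotLoadedException if NativeLibraryLoader
        // could not load the rocks-tools native library
        // (ROCKS_TOOLS_NATIVE_LIBRARY_NAME).
        ManagedSSTDumpTool sstDumpTool =
            new ManagedSSTDumpTool(new ForkJoinPool(), 8192);
        // Both resources are AutoCloseable; closing the iterator cancels
        // the background sst_dump task and closes the pipe stream.
        try (ManagedOptions options = new ManagedOptions();
             ManagedSSTDumpIterator iterator = new ManagedSSTDumpIterator(
                 sstDumpTool, "/path/to/000025.sst", options)) {
          while (iterator.hasNext()) {
            // next() returns a KeyValue parsed from the sst_dump output.
            System.out.println(iterator.next());
          }
        } catch (Exception e) {
          throw new RuntimeException(e);
        }
      }
    }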