Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -104,7 +104,6 @@
import org.apache.hadoop.ozone.OzoneConsts;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.ShutdownHookManager;
import org.rocksdb.RocksDBException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

Expand Down Expand Up @@ -132,7 +131,7 @@ private HddsServerUtil() {
* @param server RPC server to which the protocol and implementation is added to
*/
public static void addPBProtocol(Configuration conf, Class<?> protocol,
BlockingService service, RPC.Server server) throws IOException {
BlockingService service, RPC.Server server) {
RPC.setProtocolEngine(conf, protocol, ProtobufRpcEngine.class);
server.addProtocol(RPC.RpcKind.RPC_PROTOCOL_BUFFER, protocol, service);
}
Expand Down Expand Up @@ -670,22 +669,6 @@ public static UserGroupInformation getRemoteUser() throws IOException {
return (ugi != null) ? ugi : UserGroupInformation.getCurrentUser();
}

/**
 * Wraps a {@link RocksDBException} in an {@link IOException}, folding the
 * RocksDB status code and message into the exception text.
 *
 * @param msg - Message to add to exception.
 * @param e - Original Exception.
 * @return IOE carrying {@code msg}, the RocksDB status code and message,
 *         with {@code e} preserved as the cause.
 */
public static IOException toIOException(String msg, RocksDBException e) {
  final String statusCode;
  if (e.getStatus() == null) {
    // Status may be absent for exceptions created without a native status.
    statusCode = "N/A";
  } else {
    statusCode = e.getStatus().getCodeString();
  }
  final String errMessage = e.getMessage() != null ? e.getMessage() : "Unknown error";
  return new IOException(msg + "; status : " + statusCode + "; message : " + errMessage, e);
}

/**
* Add exception classes which server won't log at all.
* @param server
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,11 +17,8 @@

package org.apache.hadoop.hdds.utils.db;

import static org.apache.hadoop.hdds.utils.HddsServerUtil.toIOException;

import com.google.common.base.Preconditions;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
Expand Down Expand Up @@ -53,7 +50,7 @@ public final class DBConfigFromFile {
private DBConfigFromFile() {
}

public static File getConfigLocation() throws IOException {
public static File getConfigLocation() {
String path = System.getenv(CONFIG_DIR);

// Make testing easy.
Expand Down Expand Up @@ -109,10 +106,9 @@ public static String getOptionsFileNameFromDB(String dbFileName) {
* @param dbFileName - The DB File Name, for example, OzoneManager.db.
* @param cfDescs - ColumnFamily Handles.
* @return DBOptions, Options to be used for opening/creating the DB.
* @throws IOException
*/
public static ManagedDBOptions readFromFile(String dbFileName,
List<ColumnFamilyDescriptor> cfDescs) throws IOException {
List<ColumnFamilyDescriptor> cfDescs) throws RocksDatabaseException {
Preconditions.checkNotNull(dbFileName);
Preconditions.checkNotNull(cfDescs);
Preconditions.checkArgument(!cfDescs.isEmpty());
Expand All @@ -133,7 +129,7 @@ public static ManagedDBOptions readFromFile(String dbFileName,
env, options, cfDescs, true);

} catch (RocksDBException rdEx) {
throw toIOException("Unable to find/open Options file.", rdEx);
throw new RocksDatabaseException("Failed to loadOptionsFromFile " + optionsFile, rdEx);
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,28 +17,18 @@

package org.apache.hadoop.hdds.utils.db;

import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.util.Collections;
import org.apache.hadoop.hdds.utils.db.RocksDatabase.ColumnFamily;
import org.apache.hadoop.hdds.utils.db.managed.ManagedIngestExternalFileOptions;

/**
* DumpFileLoader using rocksdb sst files.
* Load rocksdb sst files.
*/
public class RDBSstFileLoader implements DumpFileLoader, Closeable {
final class RDBSstFileLoader {
private RDBSstFileLoader() { }

private final RocksDatabase db;
private final ColumnFamily family;

public RDBSstFileLoader(RocksDatabase db, ColumnFamily cf) {
this.db = db;
this.family = cf;
}

@Override
public void load(File externalFile) throws IOException {
static void load(RocksDatabase db, ColumnFamily family, File externalFile) throws RocksDatabaseException {
// Ingest an empty sst file results in exception.
if (externalFile.length() == 0) {
return;
Expand All @@ -51,8 +41,4 @@ public void load(File externalFile) throws IOException {
ingestOptions);
}
}

@Override
public void close() {
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -17,11 +17,8 @@

package org.apache.hadoop.hdds.utils.db;

import static org.apache.hadoop.hdds.utils.HddsServerUtil.toIOException;

import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.hadoop.hdds.utils.db.managed.ManagedEnvOptions;
import org.apache.hadoop.hdds.utils.db.managed.ManagedOptions;
Expand All @@ -31,55 +28,48 @@
/**
* DumpFileWriter using rocksdb sst files.
*/
public class RDBSstFileWriter implements DumpFileWriter, Closeable {
class RDBSstFileWriter implements Closeable {

private ManagedSstFileWriter sstFileWriter;
private File sstFile;
private AtomicLong keyCounter;
private ManagedOptions emptyOption = new ManagedOptions();
private final ManagedEnvOptions emptyEnvOptions = new ManagedEnvOptions();

public RDBSstFileWriter() {
RDBSstFileWriter(File externalFile) throws RocksDatabaseException {
this.sstFileWriter = new ManagedSstFileWriter(emptyEnvOptions, emptyOption);
this.keyCounter = new AtomicLong(0);
}

@Override
public void open(File externalFile) throws IOException {
this.sstFile = externalFile;
try {
// Here will create a new sst file each time, not append to existing
sstFileWriter.open(sstFile.getAbsolutePath());
} catch (RocksDBException e) {
closeOnFailure();
throw toIOException("Failed to open external file for dump "
+ sstFile.getAbsolutePath(), e);
throw new RocksDatabaseException("Failed to open " + sstFile, e);
}
}

@Override
public void put(byte[] key, byte[] value) throws IOException {
public void put(byte[] key, byte[] value) throws RocksDatabaseException {
try {
sstFileWriter.put(key, value);
keyCounter.incrementAndGet();
} catch (RocksDBException e) {
closeOnFailure();
throw toIOException("Failed to put kv into dump file "
+ sstFile.getAbsolutePath(), e);
throw new RocksDatabaseException("Failed to put key (length=" + key.length
+ ") and value (length=" + value.length + "), sstFile=" + sstFile.getAbsolutePath(), e);
}
}

@Override
public void close() throws IOException {
public void close() throws RocksDatabaseException {
if (sstFileWriter != null) {
try {
// We should check for empty sst file, or we'll get exception.
if (keyCounter.get() > 0) {
sstFileWriter.finish();
}
} catch (RocksDBException e) {
throw toIOException("Failed to finish dumping into file "
+ sstFile.getAbsolutePath(), e);
throw new RocksDatabaseException("Failed to finish writing to " + sstFile, e);
} finally {
closeResources();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -95,6 +95,7 @@ public RDBStore(File dbFile, ManagedDBOptions dbOptions, ManagedStatistics stati
this.dbOptions = dbOptions;
this.statistics = statistics;

Exception exception = null;
try {
if (enableCompactionDag) {
rocksDBCheckpointDiffer = RocksDBCheckpointDifferHolder.getInstance(
Expand Down Expand Up @@ -171,16 +172,20 @@ public RDBStore(File dbFile, ManagedDBOptions dbOptions, ManagedStatistics stati
checkPointManager = new RDBCheckpointManager(db, dbLocation.getName());
rdbMetrics = RDBMetrics.create();

} catch (RuntimeException e) {
exception = e;
throw new IllegalStateException("Failed to create RDBStore from " + dbFile, e);
} catch (Exception e) {
// Close DB and other things if got initialized.
close();
String msg = "Failed init RocksDB, db path : " + dbFile.getAbsolutePath()
+ ", " + "exception :" + (e.getCause() == null ?
e.getClass().getCanonicalName() + " " + e.getMessage() :
e.getCause().getClass().getCanonicalName() + " " +
e.getCause().getMessage());

throw new IOException(msg, e);
exception = e;
throw new IOException("Failed to create RDBStore from " + dbFile, e);
} finally {
if (exception != null) {
try {
close();
} catch (IOException e) {
exception.addSuppressed(e);
}
}
}

if (LOG.isDebugEnabled()) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -276,10 +276,8 @@ public void deleteBatchWithPrefix(BatchOperation batch, byte[] prefix)
@Override
public void dumpToFileWithPrefix(File externalFile, byte[] prefix)
throws IOException {
try (TableIterator<byte[], KeyValue<byte[], byte[]>> iter
= iterator(prefix);
DumpFileWriter fileWriter = new RDBSstFileWriter()) {
fileWriter.open(externalFile);
try (TableIterator<byte[], KeyValue<byte[], byte[]>> iter = iterator(prefix);
RDBSstFileWriter fileWriter = new RDBSstFileWriter(externalFile)) {
while (iter.hasNext()) {
final KeyValue<byte[], byte[]> entry = iter.next();
fileWriter.put(entry.getKey(), entry.getValue());
Expand All @@ -288,10 +286,8 @@ public void dumpToFileWithPrefix(File externalFile, byte[] prefix)
}

@Override
public void loadFromFile(File externalFile) throws IOException {
try (DumpFileLoader fileLoader = new RDBSstFileLoader(db, family)) {
fileLoader.load(externalFile);
}
public void loadFromFile(File externalFile) throws RocksDatabaseException {
RDBSstFileLoader.load(db, family, externalFile);
}

private List<KeyValue<byte[], byte[]>> getRangeKVs(byte[] startKey,
Expand Down
Loading