@@ -17,7 +17,6 @@
 
 package org.apache.hudi;
 
-import java.io.File;
 import java.io.IOException;
 import java.io.Serializable;
 import java.util.concurrent.ExecutorService;
@@ -29,30 +28,27 @@
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hudi.common.HoodieClientTestUtils;
+import org.apache.hudi.common.HoodieCommonTestHarness;
 import org.apache.hudi.common.HoodieTestDataGenerator;
 import org.apache.hudi.common.minicluster.HdfsTestService;
-import org.apache.hudi.common.model.HoodieTableType;
 import org.apache.hudi.common.model.HoodieTestUtils;
 import org.apache.hudi.common.table.HoodieTableMetaClient;
 import org.apache.hudi.common.util.FSUtils;
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.sql.SQLContext;
-import org.junit.rules.TemporaryFolder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
  * The test harness for resource initialization and cleanup.
  */
-public abstract class HoodieClientTestHarness implements Serializable {
+public abstract class HoodieClientTestHarness extends HoodieCommonTestHarness implements Serializable {
 
   private static final Logger logger = LoggerFactory.getLogger(HoodieClientTestHarness.class);
 
   protected transient JavaSparkContext jsc = null;
   protected transient SQLContext sqlContext;
   protected transient FileSystem fs;
-  protected String basePath = null;
-  protected TemporaryFolder folder = null;
   protected transient HoodieTestDataGenerator dataGen = null;
   protected transient ExecutorService executorService;
   protected transient HoodieTableMetaClient metaClient;
@@ -69,7 +65,7 @@ public abstract class HoodieClientTestHarness implements Serializable {
    * @throws IOException
    */
   public void initResources() throws IOException {
-    initTempFolderAndPath();
+    initPath();
     initSparkContexts();
     initTestDataGenerator();
     initFileSystem();
@@ -85,7 +81,6 @@ public void cleanupResources() throws IOException {
     cleanupSparkContexts();
     cleanupTestDataGenerator();
     cleanupFileSystem();
-    cleanupTempFolderAndPath();
   }
 
   /**
@@ -129,33 +124,6 @@ protected void cleanupSparkContexts() {
     }
   }
 
-  /**
-   * Initializes a temporary folder and base path.
-   *
-   * @throws IOException
-   */
-  protected void initTempFolderAndPath() throws IOException {
-    folder = new TemporaryFolder();
-    folder.create();
-    basePath = folder.getRoot().getAbsolutePath();
-  }
-
-  /**
-   * Cleanups the temporary folder and base path.
-   *
-   * @throws IOException
-   */
-  protected void cleanupTempFolderAndPath() throws IOException {
-    if (basePath != null) {
-      new File(basePath).delete();
-    }
-
-    if (folder != null) {
-      logger.info("Explicitly removing workspace used in previously run test-case");
-      folder.delete();
-    }
-  }
-
   /**
    * Initializes a file system with the hadoop configuration of Spark context.
    */
@@ -229,16 +197,6 @@ protected void cleanupTestDataGenerator() throws IOException {
     dataGen = null;
   }
 
-  /**
-   * Gets a default {@link HoodieTableType#COPY_ON_WRITE} table type.
-   * Sub-classes can override this method to specify a new table type.
-   *
-   * @return an instance of Hoodie table type.
-   */
-  protected HoodieTableType getTableType() {
-    return HoodieTableType.COPY_ON_WRITE;
-  }
-
   /**
    * Initializes a distributed file system and base directory.
    *
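
The new superclass is not part of this diff, so for context here is a minimal sketch of what `HoodieCommonTestHarness` presumably provides, reconstructed only from the members and methods deleted above and from call sites later in the diff (`folder.getRoot()` in the QPS-allocator setUp, `folder.delete()` in the rollback test); the actual class in the Hudi codebase may differ:

```java
package org.apache.hudi.common;

import java.io.IOException;
import org.apache.hudi.common.model.HoodieTableType;
import org.junit.rules.TemporaryFolder;

/**
 * Hypothetical reconstruction of the common test harness this PR makes the
 * new superclass; not taken from the PR itself.
 */
public abstract class HoodieCommonTestHarness {

  // Moved up from HoodieClientTestHarness, so subclasses keep using them.
  protected String basePath = null;
  protected transient TemporaryFolder folder = null;

  /**
   * Replaces initTempFolderAndPath(). Some callers (e.g. the new
   * TestConsistencyGuard.setup()) no longer declare IOException, so a
   * failure here is presumably rethrown unchecked.
   */
  protected void initPath() {
    try {
      folder = new TemporaryFolder();
      folder.create();
      basePath = folder.getRoot().getAbsolutePath();
    } catch (IOException ioe) {
      throw new RuntimeException(ioe);
    }
  }

  /**
   * Presumably moved here as well: getTableType() and the HoodieTableType
   * import disappear from HoodieClientTestHarness without replacement.
   */
  protected HoodieTableType getTableType() {
    return HoodieTableType.COPY_ON_WRITE;
  }
}
```

A dedicated cleanup counterpart is no longer required: JUnit's `TemporaryFolder.delete()` removes the directory tree recursively, and the one test that needs eager cleanup (the log-file rollback test at the end of this diff) now calls it explicitly.
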
@@ -51,18 +51,17 @@ public class TestCompactionAdminClient extends TestHoodieClientBase {
 
   @Before
   public void setUp() throws Exception {
-    initTempFolderAndPath();
+    initPath();
     initSparkContexts();
     metaClient = HoodieTestUtils.init(HoodieTestUtils.getDefaultHadoopConf(), basePath, MERGE_ON_READ);
     client = new CompactionAdminClient(jsc, basePath);
   }
 
   @After
-  public void tearDown() throws Exception {
+  public void tearDown() {
     client.close();
     metaClient = null;
     cleanupSparkContexts();
-    cleanupTempFolderAndPath();
   }
 
   @Test
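
Every remaining test class in this PR follows the same mechanical migration shown above for TestCompactionAdminClient. Condensed into one illustrative before/after pair (method names and call ordering vary slightly per class):

```java
// Before: each test drove the temp-folder lifecycle through the harness.
@Before
public void setUp() throws Exception {
  initTempFolderAndPath();      // created a TemporaryFolder, set basePath
  initSparkContexts();
}

@After
public void tearDown() throws Exception {
  cleanupSparkContexts();
  cleanupTempFolderAndPath();   // deleted the folder again
}

// After: path setup is inherited from HoodieCommonTestHarness.
@Before
public void setUp() throws Exception {
  initPath();                   // basePath now managed by the superclass
  initSparkContexts();
}

@After
public void tearDown() {        // no checked exception left to declare
  cleanupSparkContexts();
}
```

Where a tearDown still does other throwing work (DFS shutdown, file-system or mini-cluster cleanup), its `throws` clause is kept; only the temp-folder call is dropped.
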
@@ -18,7 +18,6 @@
 
 package org.apache.hudi;
 
-import java.io.IOException;
 import java.util.Arrays;
 import java.util.concurrent.TimeoutException;
 import org.apache.hadoop.fs.Path;
@@ -33,15 +32,14 @@
 public class TestConsistencyGuard extends HoodieClientTestHarness {
 
   @Before
-  public void setup() throws IOException {
-    initTempFolderAndPath();
+  public void setup() {
+    initPath();
     initFileSystemWithDefaultConfiguration();
   }
 
   @After
   public void tearDown() throws Exception {
     cleanupFileSystem();
-    cleanupTempFolderAndPath();
   }
 
   @Test
@@ -53,14 +53,13 @@ public class TestUpdateMapFunction extends HoodieClientTestHarness {
 
   @Before
   public void setUp() throws Exception {
-    initTempFolderAndPath();
+    initPath();
     HoodieTestUtils.init(HoodieTestUtils.getDefaultHadoopConf(), basePath);
     initSparkContexts("TestUpdateMapFunction");
   }
 
   @After
-  public void tearDown() throws Exception {
-    cleanupTempFolderAndPath();
+  public void tearDown() {
     cleanupSparkContexts();
   }
 
@@ -50,7 +50,7 @@ public void setUp() throws Exception {
     hbaseConfig = utility.getConnection().getConfiguration();
     initSparkContexts("TestQPSResourceAllocator");
 
-    initTempFolderAndPath();
+    initPath();
     basePath = folder.getRoot().getAbsolutePath() + QPS_TEST_SUFFIX_PATH;
     // Initialize table
     initMetaClient();
@@ -59,7 +59,6 @@
   @After
   public void tearDown() throws Exception {
     cleanupSparkContexts();
-    cleanupTempFolderAndPath();
     cleanupMetaClient();
     if (utility != null) {
       utility.shutdownMiniCluster();
@@ -102,15 +102,14 @@ public void setUp() throws Exception {
     jsc.hadoopConfiguration().addResource(utility.getConfiguration());
 
     // Create a temp folder as the base path
-    initTempFolderAndPath();
+    initPath();
     initTestDataGenerator();
     initMetaClient();
   }
 
   @After
   public void tearDown() throws Exception {
     cleanupSparkContexts();
-    cleanupTempFolderAndPath();
     cleanupTestDataGenerator();
     cleanupMetaClient();
   }
@@ -35,14 +35,13 @@ public class TestHoodieIndex extends HoodieClientTestHarness {
   @Before
   public void setUp() throws Exception {
     initSparkContexts("TestHoodieIndex");
-    initTempFolderAndPath();
+    initPath();
     initMetaClient();
   }
 
   @After
-  public void tearDown() throws Exception {
+  public void tearDown() {
     cleanupSparkContexts();
-    cleanupTempFolderAndPath();
     cleanupMetaClient();
   }
 
@@ -89,7 +89,7 @@ public TestHoodieBloomIndex(boolean rangePruning, boolean treeFiltering, boolean
   @Before
   public void setUp() throws Exception {
     initSparkContexts("TestHoodieBloomIndex");
-    initTempFolderAndPath();
+    initPath();
     initFileSystem();
     // We have some records to be tagged (two different partitions)
     schemaStr = FileIOUtils.readAsUTFString(getClass().getResourceAsStream("/exampleSchema.txt"));
@@ -101,7 +101,6 @@
   public void tearDown() throws Exception {
     cleanupSparkContexts();
     cleanupFileSystem();
-    cleanupTempFolderAndPath();
     cleanupMetaClient();
   }
 
@@ -64,17 +64,16 @@ public TestHoodieGlobalBloomIndex() throws Exception {
   @Before
   public void setUp() throws Exception {
     initSparkContexts("TestHoodieGlobalBloomIndex");
-    initTempFolderAndPath();
+    initPath();
     // We have some records to be tagged (two different partitions)
     schemaStr = FileIOUtils.readAsUTFString(getClass().getResourceAsStream("/exampleSchema.txt"));
     schema = HoodieAvroUtils.addMetadataFields(new Schema.Parser().parse(schemaStr));
     initMetaClient();
   }
 
   @After
-  public void tearDown() throws Exception {
+  public void tearDown() {
     cleanupSparkContexts();
-    cleanupTempFolderAndPath();
     cleanupMetaClient();
   }
 
@@ -59,7 +59,7 @@ public class TestHoodieCommitArchiveLog extends HoodieClientTestHarness {
   @Before
   public void init() throws Exception {
     initDFS();
-    initTempFolderAndPath();
+    initPath();
     initSparkContexts("TestHoodieCommitArchiveLog");
     hadoopConf = dfs.getConf();
     jsc.hadoopConfiguration().addResource(dfs.getConf());
@@ -70,7 +70,6 @@
   @After
   public void clean() throws IOException {
     cleanupDFS();
-    cleanupTempFolderAndPath();
     cleanupSparkContexts();
   }
 
@@ -60,7 +60,7 @@ public void setUp() throws Exception {
     initSparkContexts("TestHoodieCompactor");
 
     // Create a temp folder as the base path
-    initTempFolderAndPath();
+    initPath();
     hadoopConf = HoodieTestUtils.getDefaultHadoopConf();
     fs = FSUtils.getFs(basePath, hadoopConf);
     metaClient = HoodieTestUtils.init(hadoopConf, basePath, HoodieTableType.MERGE_ON_READ);
@@ -71,7 +71,6 @@
   public void tearDown() throws Exception {
     cleanupFileSystem();
     cleanupTestDataGenerator();
-    cleanupTempFolderAndPath();
     cleanupSparkContexts();
   }
 
@@ -56,7 +56,7 @@ public class TestHoodieMergeHandle extends HoodieClientTestHarness {
   @Before
   public void setUp() throws Exception {
     initSparkContexts("TestHoodieMergeHandle");
-    initTempFolderAndPath();
+    initPath();
     initFileSystem();
     initTestDataGenerator();
     initMetaClient();
@@ -66,7 +66,6 @@
   public void tearDown() throws Exception {
     cleanupFileSystem();
     cleanupTestDataGenerator();
-    cleanupTempFolderAndPath();
     cleanupSparkContexts();
     cleanupMetaClient();
   }
@@ -72,7 +72,7 @@ public class TestCopyOnWriteTable extends HoodieClientTestHarness {
   @Before
   public void setUp() throws Exception {
     initSparkContexts("TestCopyOnWriteTable");
-    initTempFolderAndPath();
+    initPath();
     initMetaClient();
     initTestDataGenerator();
     initFileSystem();
@@ -81,7 +81,6 @@
   @After
   public void tearDown() throws Exception {
     cleanupSparkContexts();
-    cleanupTempFolderAndPath();
     cleanupMetaClient();
     cleanupFileSystem();
     cleanupTestDataGenerator();
@@ -83,7 +83,7 @@ public void init() throws IOException {
     initDFS();
     initSparkContexts("TestHoodieMergeOnReadTable");
     jsc.hadoopConfiguration().addResource(dfs.getConf());
-    initTempFolderAndPath();
+    initPath();
     dfs.mkdirs(new Path(basePath));
     HoodieTestUtils.init(jsc.hadoopConfiguration(), basePath, HoodieTableType.MERGE_ON_READ);
     initTestDataGenerator();
@@ -92,7 +92,6 @@ public void init() throws IOException {
   @After
   public void clean() throws IOException {
     cleanupDFS();
-    cleanupTempFolderAndPath();
     cleanupSparkContexts();
     cleanupTestDataGenerator();
   }
@@ -968,6 +967,7 @@ public void testInsertsGeneratedIntoLogFilesRollback() throws Exception {
       Thread.sleep(1000);
       // Rollback again to pretend the first rollback failed partially. This should not error our
       writeClient.rollback(newCommitTime);
+      folder.delete();
     }
   }
 