 25 |  25 | import org.apache.flink.api.java.ExecutionEnvironment;
 26 |  26 | import org.apache.flink.api.java.tuple.Tuple1;
 27 |  27 | import org.apache.flink.api.java.typeutils.RowTypeInfo;
    |  28 | +import org.apache.flink.configuration.Configuration;
 28 |  29 | import org.apache.flink.connector.hbase.util.HBaseTableSchema;
 29 |  30 | import org.apache.flink.connector.hbase.util.PlannerType;
 30 |  31 | import org.apache.flink.connector.hbase2.source.AbstractTableInputFormat;

 33 |  34 | import org.apache.flink.connector.hbase2.source.HBaseRowInputFormat;
 34 |  35 | import org.apache.flink.connector.hbase2.source.HBaseTableSource;
 35 |  36 | import org.apache.flink.connector.hbase2.util.HBaseTestBase;
    |  37 | +import org.apache.flink.runtime.testutils.MiniClusterResourceConfiguration;
 36 |  38 | import org.apache.flink.streaming.api.datastream.DataStream;
 37 |  39 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
 38 |  40 | import org.apache.flink.table.api.DataTypes;

 46 |  48 | import org.apache.flink.table.descriptors.HBase;
 47 |  49 | import org.apache.flink.table.descriptors.Schema;
 48 |  50 | import org.apache.flink.table.functions.ScalarFunction;
    |  51 | +import org.apache.flink.test.util.MiniClusterWithClientResource;
 49 |  52 | import org.apache.flink.test.util.TestBaseUtils;
 50 |  53 | import org.apache.flink.types.Row;
 51 |  54 | import org.apache.flink.types.RowKind;

 57 |  60 | import org.apache.hadoop.hbase.client.Result;
 58 |  61 | import org.apache.hadoop.hbase.client.Scan;
 59 |  62 | import org.apache.hadoop.hbase.util.Bytes;
    |  63 | +import org.junit.ClassRule;
 60 |  64 | import org.junit.Test;
 61 |  65 | import org.junit.runner.RunWith;
 62 |  66 | import org.junit.runners.Parameterized;

 77 |  81 | @RunWith(Parameterized.class)
 78 |  82 | public class HBaseConnectorITCase extends HBaseTestBase {
 79 |  83 |
    |  84 | +    @ClassRule
    |  85 | +    public static final MiniClusterWithClientResource MINI_CLUSTER =
    |  86 | +            new MiniClusterWithClientResource(
    |  87 | +                    new MiniClusterResourceConfiguration.Builder()
    |  88 | +                            .setConfiguration(new Configuration())
    |  89 | +                            .build());
    |  90 | +
 80 |  91 |     @Parameterized.Parameter public PlannerType planner;
 81 |  92 |
 82 |  93 |     @Parameterized.Parameter(1)
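Note: the new @ClassRule starts a single shared Flink mini-cluster for the whole test class, so the individual tests no longer spin up their own cluster per job. A minimal standalone sketch of the same pattern follows; the class and test names are illustrative, not part of this change, and the task-manager/slot settings are assumptions:

    import org.apache.flink.configuration.Configuration;
    import org.apache.flink.runtime.testutils.MiniClusterResourceConfiguration;
    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
    import org.apache.flink.test.util.MiniClusterWithClientResource;
    import org.junit.ClassRule;
    import org.junit.Test;

    public class SharedMiniClusterSketch {

        // Started before the first test in the class, torn down after the last one.
        @ClassRule
        public static final MiniClusterWithClientResource MINI_CLUSTER =
                new MiniClusterWithClientResource(
                        new MiniClusterResourceConfiguration.Builder()
                                .setConfiguration(new Configuration())
                                .setNumberTaskManagers(1)
                                .setNumberSlotsPerTaskManager(4)
                                .build());

        @Test
        public void runsAgainstSharedCluster() throws Exception {
            // getExecutionEnvironment() resolves to the shared mini-cluster here,
            // because the rule registers it as the context environment.
            StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
            env.fromElements(1, 2, 3).print();
            env.execute("sketch job");
        }
    }
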
@@ -436,8 +447,6 @@ public void testTableSink() throws Exception {
436 | 447 |                         + " AS h");
437 | 448 |
438 | 449 |         TableResult tableResult2 = table.execute();
439 |     | -        // wait to finish
440 |     | -        tableResult2.getJobClient().get().getJobExecutionResult().get();
441 | 450 |
442 | 451 |         List<Row> results = CollectionUtil.iteratorToList(tableResult2.collect());
443 | 452 |
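The wait removed above (and the identical one removed in testTableSourceSinkWithDDL below) appears redundant because draining the iterator returned by TableResult.collect() only completes once the job has finished. This is the assumed rationale, sketched roughly as a fragment using the names from the hunk above (CloseableIterator is org.apache.flink.util.CloseableIterator):

    // Sketch of the idiom both hunks reduce to (assumed rationale, not code from this PR).
    TableResult result = table.execute();                   // submits the job asynchronously
    try (CloseableIterator<Row> it = result.collect()) {    // rows arrive while the job runs
        List<Row> rows = CollectionUtil.iteratorToList(it); // fully drained => job has finished
        // ... assert on rows ...
    }
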
@@ -529,8 +538,6 @@ public void testTableSourceSinkWithDDL() throws Exception {
529 | 538 |                         + " AS h";
530 | 539 |
531 | 540 |         TableResult tableResult3 = batchEnv.executeSql(query);
532 |     | -        // wait to finish
533 |     | -        tableResult3.getJobClient().get().getJobExecutionResult().get();
534 | 541 |
535 | 542 |         List<String> result =
536 | 543 |                 Lists.newArrayList(tableResult3.collect()).stream()