Commit 3ae6f9c

KYLIN-2434 support config kylin.source.hive.database-for-flat-table in Spark cubing
1 parent 98664f0 commit 3ae6f9c
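
The setting named above lives in kylin.properties and controls which Hive database holds the intermediate (flat) table. An illustrative entry; the database name is a placeholder, not a Kylin default:

    # kylin.properties: build the intermediate (flat) table in a dedicated Hive database
    kylin.source.hive.database-for-flat-table=kylin_flat_db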

2 files changed (+6 -5 lines)

engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkBatchCubingJobBuilder2.java (+1 -1)
@@ -47,7 +47,7 @@ protected void addLayerCubingSteps(final CubingJob result, final String jobId, f
         sparkExecutable.setClassName(SparkCubingByLayer.class.getName());
         sparkExecutable.setParam(SparkCubingByLayer.OPTION_CUBE_NAME.getOpt(), seg.getRealization().getName());
         sparkExecutable.setParam(SparkCubingByLayer.OPTION_SEGMENT_ID.getOpt(), seg.getUuid());
-        sparkExecutable.setParam(SparkCubingByLayer.OPTION_INPUT_PATH.getOpt(), flatTableDesc.getTableName());
+        sparkExecutable.setParam(SparkCubingByLayer.OPTION_INPUT_TABLE.getOpt(), seg.getConfig().getHiveDatabaseForIntermediateTable() + "." + flatTableDesc.getTableName());
         sparkExecutable.setParam(SparkCubingByLayer.OPTION_CONF_PATH.getOpt(), KylinConfig.getKylinConfPath());
         sparkExecutable.setParam(SparkCubingByLayer.OPTION_OUTPUT_PATH.getOpt(), cuboidRootPath);
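
With the change above, the job builder resolves the database from the segment's own config (seg.getConfig(), which sees cube-level overrides) and hands SparkCubingByLayer a fully qualified db.table name. A minimal standalone sketch of that qualification step; the helper and sample values are hypothetical stand-ins for KylinConfig and the flat-table descriptor, not Kylin API:

    // Minimal sketch, not Kylin code: mimics how the job builder assembles the
    // qualified flat-table name passed to SparkCubingByLayer via -hiveTable.
    public class QualifiedFlatTableName {

        // Stand-in for seg.getConfig().getHiveDatabaseForIntermediateTable():
        // the value of kylin.source.hive.database-for-flat-table, "default" if unset.
        static String databaseForFlatTable(java.util.Properties kylinProps) {
            return kylinProps.getProperty("kylin.source.hive.database-for-flat-table", "default");
        }

        public static void main(String[] args) {
            java.util.Properties props = new java.util.Properties();
            props.setProperty("kylin.source.hive.database-for-flat-table", "kylin_flat_db");

            // Stand-in for flatTableDesc.getTableName(); the real name is generated per segment.
            String flatTable = "kylin_intermediate_my_cube_2017";

            // Same concatenation as the job builder above: "<database>.<table>".
            String hiveTable = databaseForFlatTable(props) + "." + flatTable;
            System.out.println(hiveTable);   // kylin_flat_db.kylin_intermediate_my_cube_2017
        }
    }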

engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java (+5 -4)
@@ -77,22 +77,23 @@
 
 
 /**
+ * Spark application to build cube with the "by-layer" algorithm. Only support source data from Hive; Metadata in HBase.
  */
 public class SparkCubingByLayer extends AbstractApplication implements Serializable {
 
     protected static final Logger logger = LoggerFactory.getLogger(SparkCubingByLayer.class);
 
-    public static final Option OPTION_INPUT_PATH = OptionBuilder.withArgName("path").hasArg().isRequired(true).withDescription("Hive Intermediate Table").create("hiveTable");
     public static final Option OPTION_CUBE_NAME = OptionBuilder.withArgName(BatchConstants.ARG_CUBE_NAME).hasArg().isRequired(true).withDescription("Cube Name").create(BatchConstants.ARG_CUBE_NAME);
     public static final Option OPTION_SEGMENT_ID = OptionBuilder.withArgName("segment").hasArg().isRequired(true).withDescription("Cube Segment Id").create("segmentId");
     public static final Option OPTION_CONF_PATH = OptionBuilder.withArgName("confPath").hasArg().isRequired(true).withDescription("Configuration Path").create("confPath");
     public static final Option OPTION_OUTPUT_PATH = OptionBuilder.withArgName(BatchConstants.ARG_OUTPUT).hasArg().isRequired(true).withDescription("Cube output path").create(BatchConstants.ARG_OUTPUT);
+    public static final Option OPTION_INPUT_TABLE = OptionBuilder.withArgName("hiveTable").hasArg().isRequired(true).withDescription("Hive Intermediate Table").create("hiveTable");
 
     private Options options;
 
     public SparkCubingByLayer() {
         options = new Options();
-        options.addOption(OPTION_INPUT_PATH);
+        options.addOption(OPTION_INPUT_TABLE);
         options.addOption(OPTION_CUBE_NAME);
         options.addOption(OPTION_SEGMENT_ID);
         options.addOption(OPTION_CONF_PATH);
@@ -134,7 +135,7 @@ private static final void prepare() {
 
     @Override
     protected void execute(OptionsHelper optionsHelper) throws Exception {
-        final String hiveTable = optionsHelper.getOptionValue(OPTION_INPUT_PATH);
+        final String hiveTable = optionsHelper.getOptionValue(OPTION_INPUT_TABLE);
         final String cubeName = optionsHelper.getOptionValue(OPTION_CUBE_NAME);
         final String segmentId = optionsHelper.getOptionValue(OPTION_SEGMENT_ID);
         final String confPath = optionsHelper.getOptionValue(OPTION_CONF_PATH);
@@ -154,7 +155,7 @@ protected void execute(OptionsHelper optionsHelper) throws Exception {
         final KylinConfig envConfig = KylinConfig.getInstanceFromEnv();
 
         HiveContext sqlContext = new HiveContext(sc.sc());
-        final DataFrame intermediateTable = sqlContext.table(envConfig.getHiveDatabaseForIntermediateTable() + "." + hiveTable);
+        final DataFrame intermediateTable = sqlContext.table(hiveTable);
 
         final CubeInstance cubeInstance = CubeManager.getInstance(envConfig).getCube(cubeName);
         final CubeDesc cubeDesc = cubeInstance.getDescriptor();
