/**
+ * Spark application to build a cube with the "by-layer" algorithm. Only supports source data from Hive; metadata is stored in HBase.
*/
public class SparkCubingByLayer extends AbstractApplication implements Serializable {

    protected static final Logger logger = LoggerFactory.getLogger(SparkCubingByLayer.class);

-   public static final Option OPTION_INPUT_PATH = OptionBuilder.withArgName("path").hasArg().isRequired(true).withDescription("Hive Intermediate Table").create("hiveTable");
    public static final Option OPTION_CUBE_NAME = OptionBuilder.withArgName(BatchConstants.ARG_CUBE_NAME).hasArg().isRequired(true).withDescription("Cube Name").create(BatchConstants.ARG_CUBE_NAME);
    public static final Option OPTION_SEGMENT_ID = OptionBuilder.withArgName("segment").hasArg().isRequired(true).withDescription("Cube Segment Id").create("segmentId");
    public static final Option OPTION_CONF_PATH = OptionBuilder.withArgName("confPath").hasArg().isRequired(true).withDescription("Configuration Path").create("confPath");
    public static final Option OPTION_OUTPUT_PATH = OptionBuilder.withArgName(BatchConstants.ARG_OUTPUT).hasArg().isRequired(true).withDescription("Cube output path").create(BatchConstants.ARG_OUTPUT);
+   public static final Option OPTION_INPUT_TABLE = OptionBuilder.withArgName("hiveTable").hasArg().isRequired(true).withDescription("Hive Intermediate Table").create("hiveTable");

    private Options options;

    public SparkCubingByLayer() {
        options = new Options();
-       options.addOption(OPTION_INPUT_PATH);
+       options.addOption(OPTION_INPUT_TABLE);
        options.addOption(OPTION_CUBE_NAME);
        options.addOption(OPTION_SEGMENT_ID);
        options.addOption(OPTION_CONF_PATH);
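
Note on this hunk: the constant is renamed from OPTION_INPUT_PATH to OPTION_INPUT_TABLE, but the CLI token it creates is still "hiveTable", so existing launch scripts keep working; only the argName changes from "path" to "hiveTable". A minimal parsing sketch, assuming Commons CLI 1.x (the library OptionBuilder comes from); the class name and table name below are hypothetical, not part of the patch:

```java
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

// Hypothetical sketch class, not part of the patch.
public class HiveTableOptionSketch {
    public static void main(String[] args) throws ParseException {
        // Same definition style as OPTION_INPUT_TABLE above.
        Options opts = new Options();
        opts.addOption(OptionBuilder.withArgName("hiveTable").hasArg().isRequired(true)
                .withDescription("Hive Intermediate Table").create("hiveTable"));

        // Hypothetical arguments; under the new contract the value is a fully
        // qualified database.table name.
        CommandLine cmd = new GnuParser().parse(opts,
                new String[] { "-hiveTable", "default.kylin_intermediate_demo" });

        System.out.println(cmd.getOptionValue("hiveTable")); // default.kylin_intermediate_demo
    }
}
```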
@@ -134,7 +135,7 @@ private static final void prepare() {

    @Override
    protected void execute(OptionsHelper optionsHelper) throws Exception {
-       final String hiveTable = optionsHelper.getOptionValue(OPTION_INPUT_PATH);
+       final String hiveTable = optionsHelper.getOptionValue(OPTION_INPUT_TABLE);
        final String cubeName = optionsHelper.getOptionValue(OPTION_CUBE_NAME);
        final String segmentId = optionsHelper.getOptionValue(OPTION_SEGMENT_ID);
        final String confPath = optionsHelper.getOptionValue(OPTION_CONF_PATH);
@@ -154,7 +155,7 @@ protected void execute(OptionsHelper optionsHelper) throws Exception {

        final KylinConfig envConfig = KylinConfig.getInstanceFromEnv();

        HiveContext sqlContext = new HiveContext(sc.sc());
-       final DataFrame intermediateTable = sqlContext.table(envConfig.getHiveDatabaseForIntermediateTable() + "." + hiveTable);
+       final DataFrame intermediateTable = sqlContext.table(hiveTable);

        final CubeInstance cubeInstance = CubeManager.getInstance(envConfig).getCube(cubeName);
        final CubeDesc cubeDesc = cubeInstance.getDescriptor();
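
The last hunk shifts table qualification to the caller: execute() no longer prepends envConfig.getHiveDatabaseForIntermediateTable(), so the -hiveTable value must already be a fully qualified database.table name. A sketch of the new contract, assuming the same Spark 1.x HiveContext/DataFrame API the diff itself uses; the table name is hypothetical:

```java
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.hive.HiveContext;

// Hypothetical sketch class, not part of the patch.
public class QualifiedTableSketch {
    public static void main(String[] args) {
        JavaSparkContext sc = new JavaSparkContext(
                new SparkConf().setAppName("QualifiedTableSketch"));
        HiveContext sqlContext = new HiveContext(sc.sc());

        // Before this change, execute() built the identifier itself:
        //   sqlContext.table(intermediateDb + "." + bareTableName);
        // After it, the option value is passed through verbatim, so the caller
        // supplies the qualified name ("default.kylin_intermediate_demo" is made up).
        DataFrame intermediateTable = sqlContext.table("default.kylin_intermediate_demo");
        intermediateTable.printSchema();
    }
}
```

This relies on sqlContext.table() accepting a dotted database.table identifier, which the patched call in execute() now depends on as well.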