Skip to content

Commit 0d4b6cb

Browse files
committed
[Taier_244][Taier-spark] fix sqlProxy class name
1 parent 7e6fb82 commit 0d4b6cb

File tree

6 files changed

+15
-109
lines changed

6 files changed

+15
-109
lines changed

build/mvn-build.sh

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
#!/usr/bin/env bash
22
echo 'taier Building...'
33

4-
mvn clean package -DskipTests -pl \
4+
mvn clean package -DskipTests -T 1C -pl \
55
taier-data-develop,\
66
taier-worker/taier-plugins/dummy,\
77
taier-worker/taier-plugins/flink/common,\

taier-worker/taier-plugins/spark/yarn2-hdfs2-spark210-core/spark-yarn-client-core/src/main/java/com/dtstack/taier/sparkyarn/sparkext/CarbondataClientExt.java

-71
This file was deleted.

taier-worker/taier-plugins/spark/yarn2-hdfs2-spark210-core/spark-yarn-client-core/src/main/java/com/dtstack/taier/sparkyarn/sparkext/ClientExtFactory.java

+2-7
Original file line numberDiff line numberDiff line change
@@ -36,13 +36,8 @@ public class ClientExtFactory {
3636
public static ClientExt getClientExt(FilesystemManager filesystemManager,
3737
ClientArguments args,
3838
Configuration hadoopConf,
39-
SparkConf sparkConf,
40-
boolean isCarbondata){
39+
SparkConf sparkConf){
4140

42-
if(!isCarbondata){
43-
return new ClientExt(filesystemManager, args, hadoopConf, sparkConf);
44-
}else{
45-
return new CarbondataClientExt(filesystemManager, args, hadoopConf, sparkConf);
46-
}
41+
return new ClientExt(filesystemManager, args, hadoopConf, sparkConf);
4742
}
4843
}

taier-worker/taier-plugins/spark/yarn2-hdfs2-spark210-core/spark-yarn-client-core/src/main/java/com/dtstack/taier/sparkyarn/sparkyarn/SparkYarnClient.java

+4-28
Original file line numberDiff line numberDiff line change
@@ -23,12 +23,8 @@
2323
import com.dtstack.taier.base.monitor.AcceptedApplicationMonitor;
2424
import com.dtstack.taier.base.util.HadoopConfTool;
2525
import com.dtstack.taier.base.util.KerberosUtils;
26-
import com.dtstack.taier.pluginapi.CustomThreadFactory;
27-
import com.dtstack.taier.pluginapi.JarFileInfo;
28-
import com.dtstack.taier.pluginapi.JobClient;
29-
import com.dtstack.taier.pluginapi.JobIdentifier;
30-
import com.dtstack.taier.pluginapi.JobParam;
3126
import com.dtstack.taier.base.util.Splitter;
27+
import com.dtstack.taier.pluginapi.*;
3228
import com.dtstack.taier.pluginapi.client.AbstractClient;
3329
import com.dtstack.taier.pluginapi.enums.ComputeType;
3430
import com.dtstack.taier.pluginapi.enums.EJobType;
@@ -65,20 +61,13 @@
6561
import org.apache.spark.deploy.yarn.ClientArguments;
6662
import org.slf4j.Logger;
6763
import org.slf4j.LoggerFactory;
68-
import sun.misc.BASE64Decoder;
6964

7065
import java.io.File;
7166
import java.io.IOException;
7267
import java.net.URLDecoder;
7368
import java.net.URLEncoder;
7469
import java.security.PrivilegedExceptionAction;
75-
import java.util.ArrayList;
76-
import java.util.Arrays;
77-
import java.util.HashMap;
78-
import java.util.Iterator;
79-
import java.util.List;
80-
import java.util.Map;
81-
import java.util.Properties;
70+
import java.util.*;
8271
import java.util.concurrent.LinkedBlockingQueue;
8372
import java.util.concurrent.ThreadPoolExecutor;
8473
import java.util.concurrent.TimeUnit;
@@ -90,8 +79,6 @@ public class SparkYarnClient extends AbstractClient {
9079

9180
private static final Logger logger = LoggerFactory.getLogger(SparkYarnClient.class);
9281

93-
private static final BASE64Decoder DECODER = new BASE64Decoder();
94-
9582
private static final String HADOOP_USER_NAME = "HADOOP_USER_NAME";
9683

9784
private static final String SPARK_YARN_MODE = "SPARK_YARN_MODE";
@@ -227,9 +214,6 @@ private JobResult submitJobWithJar(JobClient jobClient){
227214
appArgs = exeArgsStr.split("\\s+");
228215
}
229216

230-
Properties confProp = jobClient.getConfProperties();
231-
Boolean isCarbonSpark = MathUtil.getBoolean(confProp.get(IS_CARBON_SPARK_KEY), false);
232-
233217
List<String> argList = new ArrayList<>();
234218
argList.add("--jar");
235219
argList.add(jarPath);
@@ -254,7 +238,7 @@ private JobResult submitJobWithJar(JobClient jobClient){
254238
ApplicationId appId = null;
255239

256240
try {
257-
ClientExt clientExt = ClientExtFactory.getClientExt(filesystemManager, clientArguments, yarnConf, sparkConf, isCarbonSpark);
241+
ClientExt clientExt = ClientExtFactory.getClientExt(filesystemManager, clientArguments, yarnConf, sparkConf);
258242
clientExt.setSparkYarnConfig(sparkYarnConfig);
259243
String proxyUserName = sparkYarnConfig.getDtProxyUserName();
260244
if (StringUtils.isNotBlank(proxyUserName)) {
@@ -419,8 +403,6 @@ private void addEnv2SparkConf(String cmdStr, SparkConf sparkConf) {
419403
private JobResult submitSparkSqlJobForBatch(JobClient jobClient){
420404

421405
Properties confProp = jobClient.getConfProperties();
422-
Boolean isCarbonSpark = MathUtil.getBoolean(confProp.get(IS_CARBON_SPARK_KEY), false);
423-
424406
setHadoopUserName(sparkYarnConfig);
425407
Map<String, Object> paramsMap = new HashMap<>();
426408

@@ -434,9 +416,6 @@ private JobResult submitSparkSqlJobForBatch(JobClient jobClient){
434416
paramsMap.put("logLevel", logLevel);
435417
}
436418

437-
if(isCarbonSpark){
438-
paramsMap.put("storePath", sparkYarnConfig.getCarbonStorePath());
439-
}
440419

441420
String sqlExeJson = null;
442421
try{
@@ -448,9 +427,6 @@ private JobResult submitSparkSqlJobForBatch(JobClient jobClient){
448427
}
449428

450429
String sqlProxyClass = sparkYarnConfig.getSparkSqlProxyMainClass();
451-
if(isCarbonSpark){
452-
sqlProxyClass = SparkYarnConfig.DEFAULT_CARBON_SQL_PROXY_MAINCLASS;
453-
}
454430

455431
List<String> argList = new ArrayList<>();
456432
argList.add("--jar");
@@ -470,7 +446,7 @@ private JobResult submitSparkSqlJobForBatch(JobClient jobClient){
470446
ApplicationId appId = null;
471447

472448
try {
473-
ClientExt clientExt = ClientExtFactory.getClientExt(filesystemManager, clientArguments, yarnConf, sparkConf, isCarbonSpark);
449+
ClientExt clientExt = ClientExtFactory.getClientExt(filesystemManager, clientArguments, yarnConf, sparkConf);
474450
clientExt.setSparkYarnConfig(sparkYarnConfig);
475451
String proxyUserName = sparkYarnConfig.getDtProxyUserName();
476452
if (StringUtils.isNotBlank(proxyUserName)) {

taier-worker/taier-plugins/spark/yarn2-hdfs2-spark210-core/spark-yarn-client-core/src/main/java/com/dtstack/taier/sparkyarn/sparkyarn/SparkYarnConfig.java

+1-1
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,7 @@ public class SparkYarnConfig extends BaseConfig {
3737

3838
private static final String DEFAULT_SPARK_PYTHON_EXTLIBPATH = "%s/pythons/pyspark.zip,/pythons/py4j-0.10.4-src.zip";
3939

40-
private static final String DEFAULT_SPARK_SQL_PROXY_MAINCLASS = "com.dtstack.sql.main.SqlProxy";
40+
private static final String DEFAULT_SPARK_SQL_PROXY_MAINCLASS = "com.dtstack.taier.sql.main.SqlProxy";
4141

4242
public static final String DEFAULT_CARBON_SQL_PROXY_MAINCLASS = "com.dtstack.sql.main.CarbondataSqlProxy";
4343

website/docs/faq.md

+7-1
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ A: 确认Taier目录下 是否有pluginLibs目录
2424
### Q: 添加数据源失败
2525
A: 确认Taier目录下application.properties datasource.plugin.path是否配置正确
2626

27-
### Q: 获取目录失败,清联系管理员
27+
### Q: 获取目录失败,请联系管理员
2828
A: 是否正确配置集群,集群是否正确获取到了队列,租户和集群是否绑定成功 参考[快速上手](./quickstart/start.md)
2929

3030
### Q: 页面访问报错, 无法登陆用户
@@ -33,4 +33,10 @@ A: 确认下前端配置的后端接口是否正确 参考[快速上手](./quick
3333
### Q:绑定新租户时初始化时提示数据库已存在
3434
A: 绑定租户新增schema选择创建 绑定租户已有schema直接选择对接
3535

36+
### Q:spark sql任务执行提示class not found
37+
A: 编译完对应的spark210插件 需要将对应的sqlProxy的jar包 放到对应集群spark组件下sparkSqlProxyPath路径下
38+
39+
### Q:spark sql任务执行提示sftp downloadDir error
40+
A: 确认下sftp组件配置是否正确 sftp组件配置路径 + confPath + 集群名称 是否有对应的xml配置文件
41+
3642

0 commit comments

Comments (0)