diff --git a/pom.xml b/pom.xml index eabaaba66..0fc4d2fba 100644 --- a/pom.xml +++ b/pom.xml @@ -45,14 +45,15 @@ - 1.1.1 + 1.1.3 + 4.12 1.1.0 0.2.0 2.11.12 1.8 3.3.3 2.8.5 - 2.11.3 + 2.13.2 3.1.1 4.5.4 4.5.4 diff --git a/streamis-jobmanager/pom.xml b/streamis-jobmanager/pom.xml index b1a644e09..e88713dc2 100644 --- a/streamis-jobmanager/pom.xml +++ b/streamis-jobmanager/pom.xml @@ -32,6 +32,7 @@ streamis-job-manager streamis-jobmanager-server streamis-projectmanager-server + streamis-job-log diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/url/LinkisURLStreamHandlerFactory.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/url/LinkisURLStreamHandlerFactory.java index 78701f646..1352c8b28 100644 --- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/url/LinkisURLStreamHandlerFactory.java +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/url/LinkisURLStreamHandlerFactory.java @@ -15,7 +15,6 @@ package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.url; -import org.apache.commons.lang.StringUtils; import java.net.URLStreamHandler; import java.net.URLStreamHandlerFactory; diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/core/FlinkLogIterator.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/core/FlinkLogIterator.scala index 2fdb12b7b..d6fb5c661 100644 --- 
a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/core/FlinkLogIterator.scala +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/core/FlinkLogIterator.scala @@ -18,6 +18,7 @@ trait FlinkLogIterator extends Iterator[String] with Closeable { val engineConnLogOperator: EngineConnLogOperator def init(): Unit def getLogPath: String + def getLogDirSuffix: String def getLogs: util.ArrayList[String] def getEndLine: Long } @@ -28,6 +29,7 @@ class SimpleFlinkJobLogIterator(override val requestPayload: LogRequestPayload, private var logs: util.ArrayList[String] = _ private var index = 0 private var logPath: String = _ + private var logDirSuffix: String = _ private var isClosed = true private var endLine = 0 @@ -69,4 +71,8 @@ class SimpleFlinkJobLogIterator(override val requestPayload: LogRequestPayload, override def getLogs: util.ArrayList[String] = logs override def getEndLine: Long = endLine + + def setLogDirSuffix(logDirSuffix: String) : Unit = this.logDirSuffix = logDirSuffix + + override def getLogDirSuffix: String = logDirSuffix } diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/entity/LogRequestPayload.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/entity/LogRequestPayload.scala index 29f90d325..20cb4d081 100644 --- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/entity/LogRequestPayload.scala +++ 
b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/entity/LogRequestPayload.scala @@ -14,6 +14,7 @@ class LogRequestPayload { private var onlyKeywords: String = _ private var lastRows = 0 private var logType: String = _ + private var logHistory: Boolean = false def getPageSize: Int = pageSize def setPageSize(pageSize: Int): Unit = this.pageSize = pageSize @@ -32,4 +33,8 @@ class LogRequestPayload { def getLogType: String = logType def setLogType(logType: String): Unit = this.logType = logType + + def isLogHistory: Boolean = logHistory + + def setLogHistory(logHistory: Boolean): Unit = this.logHistory = logHistory } diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/FlinkJobClient.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/FlinkJobClient.scala index 951145b94..b5c64a523 100644 --- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/FlinkJobClient.scala +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/FlinkJobClient.scala @@ -23,13 +23,17 @@ import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.core.{FlinkLo import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.entity.LogRequestPayload import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.exception.{FlinkJobLaunchErrorException, FlinkJobStateFetchException, FlinkSavePointException} import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.manager.FlinkJobLaunchManager -import 
com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.operator.{FlinkTriggerSavepointOperator, FlinkYarnLogOperator} +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.operator.{FlinkClientLogOperator, FlinkTriggerSavepointOperator, FlinkYarnLogOperator} import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.{Checkpoint, Savepoint} +import org.apache.commons.lang3.StringUtils import org.apache.linkis.common.utils.{Logging, Utils} -import org.apache.linkis.computation.client.once.OnceJob -import org.apache.linkis.computation.client.once.simple.SimpleOnceJob +import org.apache.linkis.computation.client.once.action.ECResourceInfoAction +import org.apache.linkis.computation.client.once.result.ECResourceInfoResult +import org.apache.linkis.computation.client.once.{LinkisManagerClient, LinkisManagerClientImpl, OnceJob} +import org.apache.linkis.computation.client.once.simple.{SimpleOnceJob, SimpleOnceJobBuilder} import org.apache.linkis.computation.client.operator.impl.EngineConnLogOperator - +import org.apache.linkis.httpclient.dws.DWSHttpClient +import java.util import java.net.URI class FlinkJobClient(onceJob: OnceJob, var jobInfo: FlinkJobInfo, stateManager: JobStateManager) @@ -39,9 +43,13 @@ class FlinkJobClient(onceJob: OnceJob, var jobInfo: FlinkJobInfo, stateManager: * Log operator */ private var logOperatorMap = Map( - "client" -> EngineConnLogOperator.OPERATOR_NAME, + "client" -> FlinkClientLogOperator.OPERATOR_NAME, "yarn" -> FlinkYarnLogOperator.OPERATOR_NAME ) + /** + * The linkis client in onceJob + */ + private var linkisClient: DWSHttpClient = _ override def getJobInfo: FlinkJobInfo = { getJobInfo(false) @@ -99,13 +107,38 @@ class FlinkJobClient(onceJob: OnceJob, var jobInfo: FlinkJobInfo, stateManager: case Some(operator) => onceJob.getOperator(operator) match { case engineConnLogOperator: EngineConnLogOperator => + val logIterator = new SimpleFlinkJobLogIterator(requestPayload, engineConnLogOperator) 
+ engineConnLogOperator match { + case clientLogOperator: FlinkClientLogOperator => + var logDirSuffix = this.jobInfo.getLogDirSuffix + if (StringUtils.isBlank(logDirSuffix) && requestPayload.isLogHistory){ + // If want to fetch the history log, must get the log directory suffix first + getLinkisClient match { + case client: DWSHttpClient => + Option(Utils.tryCatch{ + client.execute(ECResourceInfoAction.newBuilder().setUser(jobInfo.getUser) + .setTicketid(clientLogOperator.getTicketId).build()).asInstanceOf[ECResourceInfoResult] + }{ + case e: Exception => + warn("Fail to query the engine conn resource info from linkis", e) + null + }) match { + case Some(result) => logDirSuffix = Utils.tryAndWarn{result.getData.getOrDefault("ecResourceInfoRecord", new util.HashMap[String, Any]).asInstanceOf[util.Map[String, Any]] + .getOrDefault("logDirSuffix", "").asInstanceOf[String]} + case _ => + } + } + } + clientLogOperator.setLogDirSuffix(logDirSuffix) + logIterator.setLogDirSuffix(logDirSuffix) + case _ => + } engineConnLogOperator match { case yarnLogOperator: FlinkYarnLogOperator => yarnLogOperator.setApplicationId(jobInfo.getApplicationId) case _ => } engineConnLogOperator.setECMServiceInstance(jobInfo.getECMInstance) engineConnLogOperator.setEngineConnType(FlinkJobLaunchManager.FLINK_ENGINE_CONN_TYPE) - val logIterator = new SimpleFlinkJobLogIterator(requestPayload, engineConnLogOperator) logIterator.init() jobInfo match { case jobInfo: FlinkJobInfo => jobInfo.setLogPath(logIterator.getLogPath) @@ -161,5 +194,27 @@ class FlinkJobClient(onceJob: OnceJob, var jobInfo: FlinkJobInfo, stateManager: triggerSavepoint(savepointURI.toString, JobLauncherConfiguration.FLINK_TRIGGER_SAVEPOINT_MODE.getValue) } + /** + * Get linkis client + * @return + */ + def getLinkisClient: DWSHttpClient = { + Utils.tryAndWarn{ + if (null == this.linkisClient){ + this.synchronized{ + if (null == this.linkisClient){ + this.linkisClient = SimpleOnceJobBuilder.getLinkisManagerClient match { + case 
client: LinkisManagerClient => + val dwsClientField = classOf[LinkisManagerClientImpl].getDeclaredField("dwsHttpClient") + dwsClientField.setAccessible(true) + dwsClientField.get(client).asInstanceOf[DWSHttpClient] + case _ => null + } + } + } + } + this.linkisClient + } + } } diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/FlinkJobInfo.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/FlinkJobInfo.scala index 5c2986609..0fd1f1783 100644 --- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/FlinkJobInfo.scala +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/FlinkJobInfo.scala @@ -34,41 +34,53 @@ class FlinkJobInfo extends YarnJobInfo { private var applicationUrl: String = _ private var status: String = _ private var logPath: String = _ + private var logDirSuffix: String = _ private var resources: java.util.Map[String, Object] = _ private var completedMsg: String = _ private var jobStates: Array[JobStateInfo] = _ + override def getApplicationId: String = applicationId + def setApplicationId(applicationId: String): Unit = this.applicationId = applicationId override def getApplicationUrl: String = applicationUrl + def setApplicationUrl(applicationUrl: String): Unit = this.applicationUrl = applicationUrl override def getId: String = id - def setId(id: String): Unit = this.id = id + def setId(id: String): Unit = this.id = id override def getECMInstance: ServiceInstance = ecmInstance + def setECMInstance(ecmInstance: ServiceInstance): Unit = this.ecmInstance = ecmInstance override def getUser: String = user + def setUser(user: String): Unit = 
this.user = user override def getStatus: String = status + override def setStatus(status: String): Unit = this.status = status override def getLogPath: String = logPath + def setLogPath(logPath: String): Unit = this.logPath = logPath override def getResources: util.Map[String, Object] = resources + def setResources(resources: java.util.Map[String, Object]): Unit = this.resources = resources def getSavepoint: String = savepoint + def setSavepoint(savepoint: String): Unit = this.savepoint = savepoint def getCheckpoint: String = checkpoint + def setCheckpoint(checkpoint: String): Unit = this.checkpoint = checkpoint override def getCompletedMsg: String = completedMsg + def setCompletedMsg(completedMsg: String): Unit = this.completedMsg = completedMsg override def toString: String = s"FlinkJobInfo(id: $id, status: $status, applicationId: $applicationId, applicationUrl: $applicationUrl, logPath: $logPath)" @@ -85,6 +97,7 @@ class FlinkJobInfo extends YarnJobInfo { def setJobStates(jobStates: Array[JobStateInfo]): Unit = { this.jobStates = jobStates } + /** * Job name * @@ -95,11 +108,16 @@ class FlinkJobInfo extends YarnJobInfo { def setName(name: String): Unit = { this.name = name } -} -object FlinkJobInfo{ - def main(args: Array[String]): Unit = { - val jobInfo = "{\"jobStates:\":{\"location\":\"xx\"}" - DWSHttpClient.jacksonJson.readValue(jobInfo, classOf[FlinkJobInfo]) + /** + * Job log directory suffix + * + * @return + */ + override def getLogDirSuffix: String = this.logDirSuffix + + override def setLogDirSuffix(logDirSuffix: String): Unit = { + this.logDirSuffix = logDirSuffix } } + diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/LinkisJobInfo.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/LinkisJobInfo.scala index da02fda50..4183f5025 100644 
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/LinkisJobInfo.scala +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/LinkisJobInfo.scala @@ -11,5 +11,11 @@ trait LinkisJobInfo extends JobInfo { */ def getECMInstance: ServiceInstance + /** + * Job log directory suffix + * @return + */ + def getLogDirSuffix: String + def setLogDirSuffix(logDirSuffix: String): Unit } diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/SimpleFlinkJobLaunchManager.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/SimpleFlinkJobLaunchManager.scala index 6ad2e4f88..8b4308a2b 100644 --- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/SimpleFlinkJobLaunchManager.scala +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/SimpleFlinkJobLaunchManager.scala @@ -15,16 +15,16 @@ package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.manager -import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.{JobState, JobStateInfo} +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobState import com.webank.wedatasphere.streamis.jobmanager.launcher.job.{JobClient, LaunchJob} import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.conf.JobLauncherConfiguration -import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.{FlinkJobClient, FlinkJobInfo, LinkisJobInfo} import 
com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.manager.SimpleFlinkJobLaunchManager.INSTANCE_NAME -import org.apache.commons.lang.StringEscapeUtils +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.{FlinkJobClient, FlinkJobInfo, LinkisJobInfo} +import org.apache.commons.lang3.StringEscapeUtils import org.apache.linkis.common.utils.{RetryHandler, Utils} import org.apache.linkis.computation.client.once.simple.{SimpleOnceJob, SubmittableSimpleOnceJob} import org.apache.linkis.computation.client.once.{OnceJob, SubmittableOnceJob} -import org.apache.linkis.computation.client.operator.impl.{EngineConnApplicationInfoOperator, EngineConnLogOperator} +import org.apache.linkis.computation.client.operator.impl.EngineConnApplicationInfoOperator import org.apache.linkis.httpclient.dws.DWSHttpClient import org.apache.linkis.ujes.client.exception.UJESJobException diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkClientLogOperator.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkClientLogOperator.scala new file mode 100644 index 000000000..a41018a74 --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkClientLogOperator.scala @@ -0,0 +1,34 @@ +package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.operator + +import org.apache.commons.lang3.StringUtils +import org.apache.linkis.computation.client.once.action.EngineConnOperateAction +import org.apache.linkis.computation.client.operator.impl.{EngineConnLogOperator, EngineConnLogs} + +/** + * Append "logDirSuffix" parameter + */ +class FlinkClientLogOperator extends EngineConnLogOperator{ + + private var 
logDirSuffix: String = _ + + def setLogDirSuffix(logDirSuffix: String): Unit = { + this.logDirSuffix = logDirSuffix + } + + protected override def addParameters(builder: EngineConnOperateAction.Builder): Unit = { + builder.operatorName(EngineConnLogOperator.OPERATOR_NAME) + if (StringUtils.isNotBlank(this.logDirSuffix)) { + builder.addParameter("logDirSuffix", logDirSuffix) + } + super.addParameters(builder) + } + + + override def getTicketId: String = super.getTicketId + + override def getName: String = FlinkClientLogOperator.OPERATOR_NAME +} + +object FlinkClientLogOperator { + val OPERATOR_NAME = "engineConnLog_flink" +} diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkYarnLogOperator.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkYarnLogOperator.scala index a24e12580..975b23405 100644 --- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkYarnLogOperator.scala +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkYarnLogOperator.scala @@ -19,9 +19,9 @@ import org.apache.linkis.computation.client.once.action.EngineConnOperateAction import org.apache.linkis.computation.client.operator.impl.EngineConnLogOperator /** - * Extend the engine conn log operator + * Extend the flink client log operator */ -class FlinkYarnLogOperator extends EngineConnLogOperator{ +class FlinkYarnLogOperator extends FlinkClientLogOperator { private var applicationId: String = _ @@ -30,8 +30,9 @@ class FlinkYarnLogOperator extends EngineConnLogOperator{ } protected override def addParameters(builder: 
EngineConnOperateAction.Builder): Unit = { - builder.addParameter("yarnApplicationId", this.applicationId) super.addParameters(builder) + builder.operatorName(getName) + builder.addParameter("yarnApplicationId", this.applicationId) } override def getName: String = FlinkYarnLogOperator.OPERATOR_NAME diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/conf/JobConfKeyConstants.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/conf/JobConfKeyConstants.scala index f756daecc..187b02288 100644 --- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/conf/JobConfKeyConstants.scala +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/conf/JobConfKeyConstants.scala @@ -23,6 +23,10 @@ import org.apache.linkis.common.conf.CommonVars */ object JobConfKeyConstants { + /** + * Config group for streamis internal configuration + */ + val GROUP_INTERNAL: CommonVars[String] = CommonVars("wds.streamis.job.internal.config.group", "wds.streamis.internal.params") /** * Group: Flink extra */ diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/pom.xml b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/pom.xml new file mode 100644 index 000000000..ca49e4399 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/pom.xml @@ -0,0 +1,96 @@ + + + + streamis-job-log + com.webank.wedatasphere.streamis + 0.2.0 + ../../pom.xml + + 4.0.0 + + flink-streamis-log-collector + + + 8 + 8 + + 1.12.2 + 2.17.1 + 1.7.15 + + + + com.webank.wedatasphere.streamis + streamis-job-log-collector + ${streamis.version} 
+ + + + org.apache.flink + flink-java + ${flink.version} + provided + + + org.apache.flink + flink-yarn_2.11 + ${flink.version} + provided + + + + junit + junit + ${junit.version} + test + + + + org.slf4j + slf4j-api + ${slf4j.version} + provided + + + + org.apache.logging.log4j + log4j-slf4j-impl + ${log4j.version} + provided + + + + org.apache.logging.log4j + log4j-api + ${log4j.version} + provided + + + + + + org.apache.maven.plugins + maven-assembly-plugin + 2.3 + + + assemble + + single + + + install + + + + + src/main/assembly/package.xml + + false + + + + + \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/assembly/package.xml b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/assembly/package.xml new file mode 100644 index 000000000..8da27bf2c --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/assembly/package.xml @@ -0,0 +1,19 @@ + + + package + + + jar + + false + + + / + true + runtime + + + \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/flink/FlinkStreamisConfigAutowired.java b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/flink/FlinkStreamisConfigAutowired.java new file mode 100644 index 000000000..ba1756006 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/flink/FlinkStreamisConfigAutowired.java @@ -0,0 +1,120 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.flink; + +import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.StreamisLogAppenderConfig; 
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j2.StreamisLog4j2AppenderConfig; +import com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired; +import org.apache.commons.lang3.StringUtils; +import org.apache.flink.configuration.Configuration; +import org.apache.flink.configuration.GlobalConfiguration; +import org.apache.flink.runtime.util.EnvironmentInformation; +import org.apache.flink.yarn.configuration.YarnConfigOptions; +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.core.Filter; +import org.apache.logging.log4j.core.filter.LevelMatchFilter; +import org.apache.logging.log4j.core.filter.RegexFilter; +import org.apache.logging.log4j.core.filter.ThresholdFilter; + +import java.util.Enumeration; +import java.util.List; +import java.util.Properties; + +import static com.webank.wedatasphere.streamis.jobmanager.log.collector.flink.FlinkStreamisConfigDefine.*; + +/** + * Autoconfigure the streamis config inf Flink environment + */ +public class FlinkStreamisConfigAutowired implements StreamisConfigAutowired { + + /** + * Flink configuration + */ + private Configuration configuration; + + public FlinkStreamisConfigAutowired(){ + // First to load configuration + // We should sleep and wait for append of the flink-yaml.conf + } + @Override + public StreamisLogAppenderConfig logAppenderConfig(StreamisLogAppenderConfig.Builder builder) throws Exception{ + this.configuration = loadConfiguration(); + String applicationName = + this.configuration.getString(YarnConfigOptions.APPLICATION_NAME); + if (StringUtils.isNotBlank(applicationName)){ + builder.setAppName(applicationName); + } + String gateway = this.configuration.getString(LOG_GATEWAY_ADDRESS); + if (StringUtils.isNotBlank(gateway)){ + if (gateway.endsWith("/")){ + gateway = gateway.substring(0, gateway.length() - 1); + } + gateway += this.configuration.getString(LOG_COLLECT_PATH, "/"); + builder.setRpcAddress(gateway); + } + if (builder instanceof 
StreamisLog4j2AppenderConfig.Builder) { + List filterStrategies = this.configuration.get(LOG_FILTER_STRATEGIES); + for (String filterStrategy : filterStrategies) { + if ("LevelMatch".equals(filterStrategy)) { + ((StreamisLog4j2AppenderConfig.Builder)builder).withFilter(LevelMatchFilter.newBuilder().setOnMatch(Filter.Result.ACCEPT).setOnMismatch(Filter.Result.DENY) + .setLevel(Level.getLevel(this.configuration.getString(LOG_FILTER_LEVEL_MATCH))).build()); + } else if ("ThresholdFilter".equals(filterStrategy)) { + ((StreamisLog4j2AppenderConfig.Builder)builder).withFilter(ThresholdFilter.createFilter(Level + .getLevel(this.configuration.getString(LOG_FILTER_THRESHOLD_MATCH)), Filter.Result.ACCEPT, Filter.Result.DENY)); + } else if ("RegexMatch".equals(filterStrategy)) { + ((StreamisLog4j2AppenderConfig.Builder)builder).withFilter(RegexFilter.createFilter(this.configuration.getString(LOG_FILTER_REGEX), + null, true, Filter.Result.ACCEPT, Filter.Result.DENY)); + } + } + } + String hadoopUser = EnvironmentInformation.getHadoopUser(); + if (hadoopUser.equals("") || hadoopUser.equals("")){ + hadoopUser = System.getProperty("user.name"); + } + return builder.setRpcConnTimeout(this.configuration.getInteger(LOG_RPC_CONN_TIMEOUT)) + .setRpcSocketTimeout(this.configuration.getInteger(LOG_RPC_SOCKET_TIMEOUT)) + .setRpcSendRetryCnt(this.configuration.getInteger(LOG_RPC_SEND_RETRY_COUNT)) + .setRpcServerRecoveryTimeInSec(this.configuration.getInteger(LOG_RPC_SERVER_RECOVERY_TIME)) + .setRpcMaxDelayTimeInSec(this.configuration.getInteger(LOG_RPC_MAX_DELAY_TIME)) + .setRpcAuthTokenCodeKey(this.configuration.getString(LOG_RPC_AUTH_TOKEN_CODE_KEY)) + .setRpcAuthTokenUserKey(this.configuration.getString(LOG_RPC_AUTH_TOKEN_USER_KEY)) + .setRpcAuthTokenCode(this.configuration.getString(LOG_RPC_AUTH_TOKEN_CODE)) + .setRpcAuthTokenUser(this.configuration.getString(LOG_RPC_AUTH_TOKEN_USER, + hadoopUser)) + .setRpcCacheSize(this.configuration.getInteger(LOG_RPC_CACHE_SIZE)) + 
.setRpcCacheMaxConsumeThread(this.configuration.getInteger(LOG_PRC_CACHE_MAX_CONSUME_THREAD)) + .setRpcBufferSize(this.configuration.getInteger(LOG_RPC_BUFFER_SIZE)) + .setRpcBufferExpireTimeInSec(this.configuration.getInteger(LOG_RPC_BUFFER_EXPIRE_TIME)).build(); + } + + /** + * According to : + * String launchCommand = + * BootstrapTools.getTaskManagerShellCommand( + * flinkConfig, + * tmParams, + * ".", + * ApplicationConstants.LOG_DIR_EXPANSION_VAR, + * hasLogback, + * hasLog4j, + * hasKrb5, + * taskManagerMainClass, + * taskManagerDynamicProperties); + * the configuration directory of Flink yarn container is always ".", + * @return configuration + */ + private synchronized Configuration loadConfiguration(){ +// String configDir = System.getenv("FLINK_CONF_DIR"); +// if (null == configDir){ +// configDir = "."; +// } + String configDir = "."; + Properties properties = System.getProperties(); + Enumeration enumeration = properties.propertyNames(); + Configuration dynamicConfiguration = new Configuration(); + while(enumeration.hasMoreElements()){ + String prop = String.valueOf(enumeration.nextElement()); + dynamicConfiguration.setString(prop, properties.getProperty(prop)); + } + return GlobalConfiguration.loadConfiguration(configDir, dynamicConfiguration); + } + +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/flink/FlinkStreamisConfigDefine.java b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/flink/FlinkStreamisConfigDefine.java new file mode 100644 index 000000000..379f15e0e --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/flink/FlinkStreamisConfigDefine.java @@ -0,0 +1,125 @@ +package 
com.webank.wedatasphere.streamis.jobmanager.log.collector.flink; + +import org.apache.flink.configuration.ConfigOption; +import org.apache.flink.configuration.ConfigOptions; + +import java.util.List; + +/** + * Config definition + */ +public class FlinkStreamisConfigDefine { + + /** + * Gateway address of log module for streamis + */ + public static final ConfigOption LOG_GATEWAY_ADDRESS = ConfigOptions.key("stream.log.gateway.address") + .stringType().noDefaultValue().withDescription("The gateway address ex: http://127.0.0.1:8080"); + + /** + * Entrypoint path of collecting log + */ + public static final ConfigOption LOG_COLLECT_PATH = ConfigOptions.key("stream.log.collect.path") + .stringType().defaultValue("/api/rest_j/v1/streamis/streamJobManager/log/collect/events").withDescription("The entrypoint path of collecting log"); + + /** + * Connection timeout(in milliseconds) in log RPC module + */ + public static final ConfigOption LOG_RPC_CONN_TIMEOUT = ConfigOptions.key("stream.log.rpc.connect-timeout") + .intType().defaultValue(3000).withDescription("Connection timeout(ms) in log RPC module"); + + /** + * Socket timeout(in milliseconds) in log RPC module + */ + public static final ConfigOption LOG_RPC_SOCKET_TIMEOUT = ConfigOptions.key("stream.log.rpc.socket-timeout") + .intType().defaultValue(15000).withDescription("Socket timeout(ms) in log RPC module"); + + /** + * Max retry count of sending message in log RPC module + */ + public static final ConfigOption LOG_RPC_SEND_RETRY_COUNT = ConfigOptions.key("stream.log.rpc.send-retry-count") + .intType().defaultValue(3).withDescription("Max retry count of sending message in log RPC module"); + + /** + * Server recovery time(in seconds) in log RPC module + */ + public static final ConfigOption LOG_RPC_SERVER_RECOVERY_TIME = ConfigOptions.key("stream.log.rpc.server-recovery-time-in-sec") + .intType().defaultValue(5).withDescription("Server recovery time(sec) in log RPC module"); + + /** + * Max delay time(in seconds) 
in log RPC module. if reach the limit, the message will be dropped + */ + public static final ConfigOption LOG_RPC_MAX_DELAY_TIME = ConfigOptions.key("stream.log.rpc.max-delay-time") + .intType().defaultValue(60).withDescription("Max delay time(sec) in log RPC module"); + + /** + * Token code key in log RPC auth module + */ + public static final ConfigOption LOG_RPC_AUTH_TOKEN_CODE_KEY = ConfigOptions.key("stream.log.rpc.auth.token-code-key") + .stringType().defaultValue("Token-Code").withDescription("Token code key in log RPC auth module"); + + /** + * Token user key in log RPC auth module + */ + public static final ConfigOption LOG_RPC_AUTH_TOKEN_USER_KEY = ConfigOptions.key("stream.log.rpc.auth.token-user-key") + .stringType().defaultValue("Token-User").withDescription("Token user key in log RPC auth module"); + + /** + * Token code in log RPC auth module + */ + public static final ConfigOption LOG_RPC_AUTH_TOKEN_CODE = ConfigOptions.key("stream.log.rpc.auth.token-code") + .stringType().defaultValue("STREAM-LOG").withDescription("Token code in log RPC auth module"); + + /** + * Token user in log RPC auth module + */ + public static final ConfigOption LOG_RPC_AUTH_TOKEN_USER = ConfigOptions.key("stream.log.rpc.auth.token-user") + .stringType().defaultValue(System.getProperty("user.name")).withDescription("Token user in log RPC auth module"); + + /** + * Cache size in log RPC module + */ + public static final ConfigOption LOG_RPC_CACHE_SIZE = ConfigOptions.key("stream.log.rpc.cache.size") + .intType().defaultValue(150).withDescription("Cache size in log RPC module"); + + /** + * Max cache consume threads in log RPC module + */ + public static final ConfigOption LOG_PRC_CACHE_MAX_CONSUME_THREAD = ConfigOptions.key("stream.log.rpc.cache.max-consume-thread") + .intType().defaultValue(10).withDescription("Max cache consume threads in log RPC module"); + + /** + * Buffer size in log RPC module + */ + public static final ConfigOption LOG_RPC_BUFFER_SIZE = 
ConfigOptions.key("stream.log.rpc.buffer.size") + .intType().defaultValue(50).withDescription("Buffer size in log RPC module"); + + /** + * Buffer expire time(sec) in log RPC module + */ + public static final ConfigOption LOG_RPC_BUFFER_EXPIRE_TIME = ConfigOptions.key("stream.log.rpc.buffer.expire-time-in-sec") + .intType().defaultValue(2).withDescription("Buffer expire time (sec) in log RPC module"); + + /** + * Log filter strategy list + */ + public static final ConfigOption> LOG_FILTER_STRATEGIES = ConfigOptions.key("stream.log.filter.strategies") + .stringType().asList().defaultValues("LevelMatch").withDescription("Log filter strategy list"); + + /** + * Level value of LevelMatch filter strategy + */ + public static final ConfigOption LOG_FILTER_LEVEL_MATCH = ConfigOptions.key("stream.log.filter.level-match.level") + .stringType().defaultValue("ERROR").withDescription("Level value of LevelMatch filter strategy"); + + /** + * Level value of ThresholdMatch filter strategy + */ + public static final ConfigOption LOG_FILTER_THRESHOLD_MATCH = ConfigOptions.key("stream.log.filter.threshold.level") + .stringType().defaultValue("ERROR").withDescription("Level value of ThresholdMatch filter strategy"); + /** + * Regex value of RegexMatch filter strategy + */ + public static final ConfigOption LOG_FILTER_REGEX = ConfigOptions.key("stream.log.filter.regex.value") + .stringType().defaultValue(".*").withDescription("Regex value of RegexMatch filter strategy"); +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/resources/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/resources/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired new file mode 100644 index 000000000..dc13253b7 --- /dev/null +++ 
b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/resources/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired @@ -0,0 +1 @@ +com.webank.wedatasphere.streamis.jobmanager.log.collector.flink.FlinkStreamisConfigAutowired \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/flink/FlinkConfigurationLoadTest.java b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/flink/FlinkConfigurationLoadTest.java new file mode 100644 index 000000000..6958957a7 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/flink/FlinkConfigurationLoadTest.java @@ -0,0 +1,24 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.flink; + +import org.apache.flink.configuration.Configuration; +import org.apache.flink.configuration.GlobalConfiguration; +import org.junit.Test; + +import java.util.Enumeration; +import java.util.Objects; +import java.util.Properties; + +public class FlinkConfigurationLoadTest { + @Test + public void loadConfiguration() { + String configDir = Objects.requireNonNull(FlinkConfigurationLoadTest.class.getResource("/")).getFile(); + Properties properties = System.getProperties(); + Enumeration enumeration = properties.propertyNames(); + Configuration dynamicConfiguration = new Configuration(); + while(enumeration.hasMoreElements()){ + String prop = String.valueOf(enumeration.nextElement()); + dynamicConfiguration.setString(prop, properties.getProperty(prop)); + } + GlobalConfiguration.loadConfiguration(configDir, dynamicConfiguration); + } +} diff --git 
a/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/test/resources/flink-conf.yaml b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/test/resources/flink-conf.yaml new file mode 100644 index 000000000..e69de29bb diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/pom.xml b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/pom.xml new file mode 100644 index 000000000..0cdb47b8d --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/pom.xml @@ -0,0 +1,40 @@ + + + + streamis-job-log + com.webank.wedatasphere.streamis + 0.2.0 + ../../pom.xml + + 4.0.0 + + streamis-job-log-collector-core + + + 8 + 8 + 4.5.13 + 4.5.4 + + + + + com.webank.wedatasphere.streamis + streamis-job-log-common + ${streamis.version} + + + + org.apache.httpcomponents + httpclient + ${httpclient.version} + + + org.apache.httpcomponents + httpmime + ${httpmine.version} + + + \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/ExceptionListener.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/ExceptionListener.java new file mode 100644 index 000000000..4c9ac6ea8 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/ExceptionListener.java @@ -0,0 +1,15 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector; + +/** + * Exception listener + */ +public interface ExceptionListener { + + /** + * Listen the exception + * @param subject the subject that throws the exception + * @param t Throwable + * 
@param message message + */ + void onException(Object subject, Throwable t, String message); +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/cache/LogCache.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/cache/LogCache.java new file mode 100644 index 000000000..f11556cc8 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/cache/LogCache.java @@ -0,0 +1,43 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.cache; + +import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement; + +import java.util.List; +import java.util.concurrent.TimeUnit; + +/** + * Log cache + * @param element + */ +public interface LogCache { + + /** + * Cache log + * @param logElement log element + */ + void cacheLog(E logElement) throws InterruptedException; + + /** + * Drain log elements into collection + * @param elements elements + * @param maxElements max elements size + * @return count + */ + int drainLogsTo(List elements, int maxElements); + + /** + * Take log element + * @return log element + */ + E takeLog(long timeout, TimeUnit unit) throws InterruptedException; + + /** + * If the cache is full + * @return + */ + boolean isCacheable(); + /** + * Release the resource + */ + void destroy(); +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/RpcAuthConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/RpcAuthConfig.java new file mode 
100644 index 000000000..ebf9b7f68 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/RpcAuthConfig.java @@ -0,0 +1,86 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.config; + + +/** + * Authentication config + */ +public class RpcAuthConfig { + /** + * Key of token-code + */ + private String tokenCodeKey = "Token-Code"; + + /** + * Key of token-user + */ + private String tokenUserKey = "Token-User"; + + /** + * Token user + */ + private String tokenUser = System.getProperty("user.name"); + + /** + * Token code + */ + private String tokenCode = "STREAM-LOG"; + + public RpcAuthConfig(){ + + } + + public RpcAuthConfig(String tokenCodeKey, String tokenCode, String tokenUserKey, String tokenUser){ + if (null != tokenCodeKey) { + this.tokenCodeKey = tokenCodeKey; + } + if (null != tokenCode){ + this.tokenCode = tokenCode; + } + if (null != tokenUserKey){ + this.tokenUserKey = tokenUserKey; + } + if (null != tokenUser){ + this.tokenUser = tokenUser; + } + } + + public String getTokenCodeKey() { + return tokenCodeKey; + } + + public void setTokenCodeKey(String tokenCodeKey) { + this.tokenCodeKey = tokenCodeKey; + } + + public String getTokenUserKey() { + return tokenUserKey; + } + + public void setTokenUserKey(String tokenUserKey) { + this.tokenUserKey = tokenUserKey; + } + + public String getTokenUser() { + return tokenUser; + } + + public void setTokenUser(String tokenUser) { + this.tokenUser = tokenUser; + } + + public String getTokenCode() { + return tokenCode; + } + + public void setTokenCode(String tokenCode) { + this.tokenCode = tokenCode; + } + + @Override + public String toString() { + return "RpcAuthConfig{" + + ", tokenUserKey='" + tokenUserKey + '\'' + + ", tokenUser='" + tokenUser + '\'' + + '}'; + } +} diff --git 
a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/RpcLogSenderConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/RpcLogSenderConfig.java new file mode 100644 index 000000000..0fbc563d7 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/RpcLogSenderConfig.java @@ -0,0 +1,166 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.config; + + +import java.util.Objects; + +/** + * Rpc sender configuration + */ +public class RpcLogSenderConfig { + + /** + * Send address + */ + private String address; + + /** + * Timeout of connecting + */ + private int connectionTimeout = 3000; + + /** + * Timeout of reading from socket + */ + private int socketTimeout = 15000; + + /** + * Retry count of sending + */ + private int sendRetryCnt = 3; + + /** + * The time for server recovery + */ + private int serverRecoveryTimeInSec = 5; + + /** + * Retry max delay time of sender + */ + private int maxDelayTimeInSec = 60; + + /** + * Auth config + */ + private RpcAuthConfig authConfig = new RpcAuthConfig(); + + /** + * Cache config + */ + private SendLogCacheConfig cacheConfig = new SendLogCacheConfig(); + + /** + * Buffer config + */ + private SendBufferConfig bufferConfig = new SendBufferConfig(); + + public RpcLogSenderConfig(){ + + } + + public RpcLogSenderConfig(String address, int sendRetryCnt, int connectionTimeout, int socketTimeout, + int serverRecoveryTimeInSec, int maxDelayTimeInSec, + RpcAuthConfig authConfig, SendLogCacheConfig cacheConfig, SendBufferConfig bufferConfig){ + this.address = address; + this.sendRetryCnt = sendRetryCnt; + this.connectionTimeout = connectionTimeout; + 
this.socketTimeout = socketTimeout; + this.serverRecoveryTimeInSec = serverRecoveryTimeInSec; + this.maxDelayTimeInSec = maxDelayTimeInSec; + if (Objects.nonNull(authConfig)){ + this.authConfig = authConfig; + } + if (Objects.nonNull(cacheConfig)){ + this.cacheConfig = cacheConfig; + } + if (Objects.nonNull(bufferConfig)){ + this.bufferConfig = bufferConfig; + } + } + + public RpcAuthConfig getAuthConfig() { + return authConfig; + } + + public void setAuthConfig(RpcAuthConfig authConfig) { + this.authConfig = authConfig; + } + + public SendLogCacheConfig getCacheConfig() { + return cacheConfig; + } + + public void setCacheConfig(SendLogCacheConfig cacheConfig) { + this.cacheConfig = cacheConfig; + } + + public SendBufferConfig getBufferConfig() { + return bufferConfig; + } + + public void setBufferConfig(SendBufferConfig bufferConfig) { + this.bufferConfig = bufferConfig; + } + + public String getAddress() { + return address; + } + + public void setAddress(String address) { + this.address = address; + } + + public int getSendRetryCnt() { + return sendRetryCnt; + } + + public void setSendRetryCnt(int sendRetryCnt) { + this.sendRetryCnt = sendRetryCnt; + } + + public int getConnectionTimeout() { + return connectionTimeout; + } + + public void setConnectionTimeout(int connectionTimeout) { + this.connectionTimeout = connectionTimeout; + } + + public int getSocketTimeout() { + return socketTimeout; + } + + public void setSocketTimeout(int socketTimeout) { + this.socketTimeout = socketTimeout; + } + + public int getMaxDelayTimeInSec() { + return maxDelayTimeInSec; + } + + public void setMaxDelayTimeInSec(int maxDelayTimeInSec) { + this.maxDelayTimeInSec = maxDelayTimeInSec; + } + + public int getServerRecoveryTimeInSec() { + return serverRecoveryTimeInSec; + } + + public void setServerRecoveryTimeInSec(int serverRecoveryTimeInSec) { + this.serverRecoveryTimeInSec = serverRecoveryTimeInSec; + } + + @Override + public String toString() { + return "RpcLogSenderConfig{" + + 
"address='" + address + '\'' + + ", connectionTimeout=" + connectionTimeout + + ", socketTimeout=" + socketTimeout + + ", sendRetryCnt=" + sendRetryCnt + + ", serverRecoveryTimeInSec=" + serverRecoveryTimeInSec + + ", maxDelayTimeInSec=" + maxDelayTimeInSec + + ", authConfig=" + authConfig + + ", cacheConfig=" + cacheConfig + + ", bufferConfig=" + bufferConfig + + '}'; + } +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/SendBufferConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/SendBufferConfig.java new file mode 100644 index 000000000..6be0ae826 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/SendBufferConfig.java @@ -0,0 +1,47 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.config; + + +public class SendBufferConfig { + /** + * Size of send buffer + */ + private int size = 50; + + /** + * Expire time of send buffer + */ + private long expireTimeInSec = 2; + + public SendBufferConfig(){ + + } + + public SendBufferConfig(int size, long expireTimeInSec){ + this.size = size; + this.expireTimeInSec = expireTimeInSec; + } + + public int getSize() { + return size; + } + + public void setSize(int size) { + this.size = size; + } + + public long getExpireTimeInSec() { + return expireTimeInSec; + } + + public void setExpireTimeInSec(long expireTimeInSec) { + this.expireTimeInSec = expireTimeInSec; + } + + @Override + public String toString() { + return "SendBufferConfig{" + + "size=" + size + + ", expireTimeInSec=" + expireTimeInSec + + '}'; + } +} diff --git 
a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/SendLogCacheConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/SendLogCacheConfig.java new file mode 100644 index 000000000..1caaedb21 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/SendLogCacheConfig.java @@ -0,0 +1,50 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.config; + +/** + * Cache config + */ + +public class SendLogCacheConfig { + /** + * Size of send cache + */ + private int size = 150; + + /** + * Max number of consuming thread + */ + private int maxConsumeThread = 10; + + public SendLogCacheConfig(){ + + } + + public SendLogCacheConfig(int size, int maxConsumeThread){ + this.size = size; + this.maxConsumeThread = maxConsumeThread; + } + + public int getSize() { + return size; + } + + public void setSize(int size) { + this.size = size; + } + + public int getMaxConsumeThread() { + return maxConsumeThread; + } + + public void setMaxConsumeThread(int maxConsumeThread) { + this.maxConsumeThread = maxConsumeThread; + } + + @Override + public String toString() { + return "SendLogCacheConfig{" + + "size=" + size + + ", maxConsumeThread=" + maxConsumeThread + + '}'; + } +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/StreamisLogAppenderConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/StreamisLogAppenderConfig.java new file mode 100644 index 
000000000..35680913f --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/StreamisLogAppenderConfig.java @@ -0,0 +1,205 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.config; + +import java.util.ArrayList; +import java.util.Objects; +import java.util.Optional; + +/** + * Appender config + */ +public class StreamisLogAppenderConfig { + + protected final String applicationName; + + + protected final RpcLogSenderConfig senderConfig; + + protected StreamisLogAppenderConfig(String applicationName, RpcLogSenderConfig rpcLogSenderConfig){ + this.applicationName = applicationName; + this.senderConfig = null != rpcLogSenderConfig? rpcLogSenderConfig : new RpcLogSenderConfig(); + } + + public static class Builder{ + /** + * Application name + */ + protected String applicationName; + + /** + * Sender config + */ + protected final RpcLogSenderConfig rpcLogSenderConfig; + + public Builder(String applicationName, + RpcLogSenderConfig rpcLogSenderConfig){ + this.applicationName = applicationName; + + this.rpcLogSenderConfig = Optional.ofNullable(rpcLogSenderConfig).orElse(new RpcLogSenderConfig()); + } + + /** + * Set application name + * @param applicationName application name + * @return builder + */ + public StreamisLogAppenderConfig.Builder setAppName(String applicationName){ + this.applicationName = applicationName; + return this; + } + + + + /** + * Rpc address + * @param address address + * @return builder + */ + public StreamisLogAppenderConfig.Builder setRpcAddress(String address){ + this.rpcLogSenderConfig.setAddress(address); + return this; + } + + /** + * Rpc connect timeout + * @param connectionTimeout connection timeout + * @return builder + */ + public StreamisLogAppenderConfig.Builder setRpcConnTimeout(int connectionTimeout){ + this.rpcLogSenderConfig.setConnectionTimeout(connectionTimeout); + return this; + } 
+ + /** + * Rpc socket timeout + * @param socketTimeout socket timeout + * @return builder + */ + public StreamisLogAppenderConfig.Builder setRpcSocketTimeout(int socketTimeout){ + this.rpcLogSenderConfig.setSocketTimeout(socketTimeout); + return this; + } + + /** + * Rpc send retry count + * @param sendRetryCnt send retry count + * @return builder + */ + public StreamisLogAppenderConfig.Builder setRpcSendRetryCnt(int sendRetryCnt){ + this.rpcLogSenderConfig.setSendRetryCnt(sendRetryCnt); + return this; + } + + /** + * Rpc server recovery time in seconds + * @param serverRecoveryTimeInSec server recovery time + * @return builder + */ + public StreamisLogAppenderConfig.Builder setRpcServerRecoveryTimeInSec(int serverRecoveryTimeInSec){ + this.rpcLogSenderConfig.setServerRecoveryTimeInSec(serverRecoveryTimeInSec); + return this; + } + + /** + * Rpc max delay time in seconds + * @param maxDelayTimeInSec max delay time in seconds + * @return builder + */ + public StreamisLogAppenderConfig.Builder setRpcMaxDelayTimeInSec(int maxDelayTimeInSec){ + this.rpcLogSenderConfig.setMaxDelayTimeInSec(maxDelayTimeInSec); + return this; + } + + /** + * Rpc auth token code key + * @param tokenCodeKey key of token code + * @return builder + */ + public StreamisLogAppenderConfig.Builder setRpcAuthTokenCodeKey(String tokenCodeKey){ + this.rpcLogSenderConfig.getAuthConfig().setTokenCodeKey(tokenCodeKey); + return this; + } + + /** + * Rpc auth token user key + * @param tokenUserKey key of token user + * @return builder + */ + public StreamisLogAppenderConfig.Builder setRpcAuthTokenUserKey(String tokenUserKey){ + this.rpcLogSenderConfig.getAuthConfig().setTokenUserKey(tokenUserKey); + return this; + } + + /** + * Rpc auth token user + * @param tokenUser token user + * @return builder + */ + public StreamisLogAppenderConfig.Builder setRpcAuthTokenUser(String tokenUser){ + this.rpcLogSenderConfig.getAuthConfig().setTokenUser(tokenUser); + return this; + } + + /** + * Rpc auth token code + 
* @param tokenCode token code + * @return builder + */ + public StreamisLogAppenderConfig.Builder setRpcAuthTokenCode(String tokenCode){ + this.rpcLogSenderConfig.getAuthConfig().setTokenCode(tokenCode); + return this; + } + + /** + * Rpc cache size + * @param cacheSize cache size + * @return builder + */ + public StreamisLogAppenderConfig.Builder setRpcCacheSize(int cacheSize){ + this.rpcLogSenderConfig.getCacheConfig().setSize(cacheSize); + return this; + } + + /** + * Rpc cache max consume thread + * @param maxConsumeThread max consume thread + * @return builder + */ + public StreamisLogAppenderConfig.Builder setRpcCacheMaxConsumeThread(int maxConsumeThread){ + this.rpcLogSenderConfig.getCacheConfig().setMaxConsumeThread(maxConsumeThread); + return this; + } + + /** + * Rpc buffer size + * @param bufferSize buffer size + * @return builder + */ + public StreamisLogAppenderConfig.Builder setRpcBufferSize(int bufferSize){ + this.rpcLogSenderConfig.getBufferConfig().setSize(bufferSize); + return this; + } + + /** + * Rpc buffer expire time in seconds + * @param expireTimeInSec expire time + * @return builder + */ + public StreamisLogAppenderConfig.Builder setRpcBufferExpireTimeInSec(int expireTimeInSec){ + this.rpcLogSenderConfig.getBufferConfig().setExpireTimeInSec(expireTimeInSec); + return this; + } + + public StreamisLogAppenderConfig build(){ + return new StreamisLogAppenderConfig(applicationName, rpcLogSenderConfig); + } + } + public String getApplicationName() { + return applicationName; + } + + + public RpcLogSenderConfig getSenderConfig() { + return senderConfig; + } + +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/AbstractRpcLogSender.java 
b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/AbstractRpcLogSender.java new file mode 100644 index 000000000..3afc34147 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/AbstractRpcLogSender.java @@ -0,0 +1,476 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender; + +import com.webank.wedatasphere.streamis.jobmanager.log.collector.ExceptionListener; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.cache.LogCache; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.SendLogCacheConfig; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf.ImmutableSendBuffer; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf.SendBuffer; +import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement; + +import java.util.*; +import java.util.concurrent.*; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.locks.Condition; +import java.util.concurrent.locks.ReentrantLock; + +/** + * Abstract rpc log sender + * @param + * @param + */ +public abstract class AbstractRpcLogSender implements RpcLogSender{ + + /** + * Size of log cache + */ + int cacheSize; + + /** + * The buffer size of sender + */ + int sendBufSize; + + /** + * Max thread num of send + */ + int maxCacheConsume; + /** + * Connect config + */ + protected RpcLogSenderConfig rpcSenderConfig; + + /** + * Rpc log context + */ + private volatile RpcLogContext rpcLogContext; + + protected boolean isTerminated = false; + /** + * Use the listener instead of log4j structure + */ + protected ExceptionListener exceptionListener; + + public 
AbstractRpcLogSender(RpcLogSenderConfig rpcSenderConfig){ + this.rpcSenderConfig = rpcSenderConfig; + SendLogCacheConfig cacheConfig = rpcSenderConfig.getCacheConfig(); + this.cacheSize = cacheConfig.getSize(); + this.maxCacheConsume = cacheConfig.getMaxConsumeThread(); + this.sendBufSize = rpcSenderConfig.getBufferConfig().getSize(); + + if (sendBufSize > cacheSize) { + throw new IllegalArgumentException("Size of send buffer is larger than cache size"); + } + + } + + @Override + public LogCache getOrCreateLogCache() { + return getOrCreateRpcLogContext().getLogCache(); + } + + @Override + public void sendLog(T log) { + // Just send it into log cache + try { + getOrCreateLogCache().cacheLog(log); + } catch (InterruptedException e) { + // Invoke exception listener + Optional.ofNullable(exceptionListener).ifPresent(listener -> + listener.onException(this, e, null)); + } + } + + @Override + public void syncSendLog(T log) { + + } + + @Override + public void setExceptionListener(ExceptionListener listener) { + this.exceptionListener = listener; + } + + @Override + public void close() { + getOrCreateRpcLogContext().destroyCacheConsumers(); + this.isTerminated = true; + } + + /** + * Aggregate send buffer for sending + * @param sendBuffer send buffer + * @return E aggregated entity + */ + protected abstract E aggregateBuffer(SendBuffer sendBuffer); + + /** + * Sending operation + * @param aggregatedEntity agg entity + * @param rpcSenderConfig rpc sender config + */ + protected abstract void doSend(E aggregatedEntity, RpcLogSenderConfig rpcSenderConfig) throws Exception; + + /** + * Send log exception strategy + * @return exception strategy + */ + protected abstract SendLogExceptionStrategy getSendLogExceptionStrategy(); + + protected RpcLogContext getOrCreateRpcLogContext(){ + if (null == this.rpcLogContext){ + synchronized (this){ + if (null == this.rpcLogContext){ + SendLogCache logCache = new QueuedSendLogCache(this.cacheSize, false); + this.rpcLogContext = new 
RpcLogContext(logCache); + // Start cache consumer + this.rpcLogContext.startCacheConsumer(); + } + } + + } + return this.rpcLogContext; + } + + private class RpcLogContext{ + + private static final String RPC_LOG_CACHE_CONSUMER = "RpcLog-Cache-Consumer-Thread-"; + /** + * Send log cache + */ + private final SendLogCache logCache; + + /** + * Consume pool + */ + private final ThreadPoolExecutor consumePool; + + /** + * Count of the consumers + */ + private int consumers = 0; + + /** + * Futures of consumers + */ + private final Map> sendLogCacheConsumers = new ConcurrentHashMap<>(); + /** + * Context lock + */ + private final ReentrantLock ctxLock; + public RpcLogContext(SendLogCache logCache){ + this.logCache = logCache; + this.ctxLock = new ReentrantLock(); + this.consumePool = new ThreadPoolExecutor(0, maxCacheConsume, + 60L, TimeUnit.SECONDS, + new SynchronousQueue<>(), new ThreadFactory() { + private final ThreadGroup group = Thread.currentThread().getThreadGroup(); + private final AtomicInteger threadNum = new AtomicInteger(1); + @Override + public Thread newThread(Runnable r) { + Thread t = new Thread(group, r, RPC_LOG_CACHE_CONSUMER + + threadNum.getAndIncrement(), 0); + if (t.isDaemon()) { + t.setDaemon(false); + } + if (t.getPriority() != Thread.NORM_PRIORITY) { + t.setPriority(Thread.NORM_PRIORITY); + } + return t; + } + }); + } + + public void startCacheConsumer(){ + this.ctxLock.lock(); + try { + if (consumers >= maxCacheConsume) { + throw new IllegalStateException("Over the limit number of cache consumers: [" + maxCacheConsume + "]"); + } + String id = UUID.randomUUID().toString(); + SendBuffer sendBuffer = new ImmutableSendBuffer<>(sendBufSize); + SendLogCacheConsumer consumer = new SendLogCacheConsumer(id, logCache, sendBuffer, rpcSenderConfig) { + @Override + protected void onFlushAndSend(SendBuffer sendBuffer) { + // First to aggregate the buffer + E aggEntity = aggregateBuffer(sendBuffer); + 
Optional.ofNullable(getSendLogExceptionStrategy()).ifPresent( + strategy -> strategy.doSend(() -> { + doSend(aggEntity, rpcSenderConfig); + return null; + }, sendBuffer)); + } + }; + Future future = this.consumePool.submit(consumer); + consumer.setFuture(future); + sendLogCacheConsumers.put(id, consumer); + this.consumers++; + } finally { + this.ctxLock.unlock(); + } + } + + public SendLogCache getLogCache(){ + return this.logCache; + } + + /** + * Destroy cache consumer + * @param id id + */ + public void destroyCacheConsumer(String id){ + SendLogCacheConsumer consumer = sendLogCacheConsumers.remove(id); + consumer.shutdown(); + } + + /** + * Destroy all the consumers + */ + public void destroyCacheConsumers(){ + this.ctxLock.lock(); + try { + sendLogCacheConsumers.forEach( (key, consumer)-> consumer.shutdown()); + } finally { + this.ctxLock.unlock(); + } + } + } + /** + * Act as ArrayBlockingQueue (jdk 1.8) + */ + private class QueuedSendLogCache implements SendLogCache{ + + // Queued items + final Object[] items; + + // Take index + int takeIndex; + + // Put index + int putIndex; + + // Count + int count; + + // Reentrant lock + final ReentrantLock lock; + + // Condition for waiting takes + private final Condition notEmpty; + + // Condition for waiting puts(cacheLog) + private final Condition notFull; + + public QueuedSendLogCache(int capacity, boolean fair) { + this.items = new Object[capacity]; + lock = new ReentrantLock(fair); + this.notEmpty = lock.newCondition(); + this.notFull = lock.newCondition(); + } + + @Override + public void cacheLog(T logElement) throws InterruptedException { + // Skip the null element + if (Objects.nonNull(logElement)){ + final ReentrantLock lock = this.lock; + lock.lockInterruptibly(); + try{ + while (count == items.length){ + notFull.await(); + } + enqueue(logElement); + }finally{ + lock.unlock(); + } + } + } + + @Override + public int drainLogsTo(List elements, int maxElements) { + if (Objects.nonNull(elements) && maxElements > 
0){ + final Object[] items = this.items; + final ReentrantLock lock = this.lock; + lock.lock(); + try{ + int n = Math.min(maxElements, count); + int take = takeIndex; + int i = 0; + try { + while (i < n){ + @SuppressWarnings("unchecked") + T x = (T) items[take]; + elements.add(x); + items[take] = null; + if (++ take == items.length) + take = 0; + i++; + } + return n; + }finally { + restoreInvariants(i, take, false); + } + } finally { + lock.unlock(); + } + } + return 0; + } + + // Equal to the poll method in ArrayBlockingQueue + @Override + public T takeLog(long timeout, TimeUnit unit) throws InterruptedException { + long nanos = unit.toNanos(timeout); + final ReentrantLock lock = this.lock; + T element; + lock.lockInterruptibly(); + try{ + while (count == 0){ + if (nanos < 0){ + return null; + } + nanos = notEmpty.awaitNanos(nanos); + } + element = dequeue(); + } finally { + lock.unlock(); + } + return element; + } + + @Override + public boolean isCacheable() { + final ReentrantLock lock = this.lock; + lock.lock(); + try { + return count < items.length; + }finally { + lock.unlock(); + } + } + + // The same as the clear() method, + @Override + public void destroy() { + final Object[] items = this.items; + final ReentrantLock lock = this.lock; + lock.lock(); + try { + int k = count; + if (k > 0) { + final int putIndex = this.putIndex; + int i = takeIndex; + do { + items[i] = null; + if (++i == items.length) + i = 0; + } while (i != putIndex); + takeIndex = putIndex; + count = 0; + for (; k > 0 && lock.hasWaiters(notFull); k--) + notFull.signal(); + } + } finally { + lock.unlock(); + } + } + + /** + * Drain the elements into send buffer + * @param sendBuffer send buffer + * @param maxElements max element size + * @return int + */ + @Override + public int drainLogsTo(SendBuffer sendBuffer, int maxElements) { + if (Objects.nonNull(sendBuffer) && maxElements > 0){ + final Object[] items = this.items; + final ReentrantLock lock = this.lock; + lock.lock(); + try{ + int n 
= Math.min(maxElements, count); + int take = takeIndex; + int i = 0; + int send; + try { + while (n > 0) { + int len = items.length - take; + int send0 = Math.min(n, len); + // Copy the array element to buffer directly + send = sendBuf(sendBuffer, this.items, take, send0); + n -= send; + if ((take = take + send) >= items.length) { + take = 0; + } + i += send; + if (send < send0 || send <= 0) { + break; + } + } + return i; + } finally { + if (i > 0){ + restoreInvariants(i, take, true); + } + } + }finally { + lock.unlock(); + } + } + return 0; + } + + @SuppressWarnings("unchecked") + private int sendBuf(SendBuffer sendBuffer, Object[] items, int takeIndex, int len){ + int send = sendBuffer.writeBuf(items, takeIndex, len); + if (send < len){ + // Buffer full exception + exceptionListener.onException(this, null, "The sender buffer is full," + + " expected: [" + len + "], actual: [" + send + "]"); + } + // Allow data loss + return send; + } + + private void restoreInvariants(int i, int take, boolean clearItems){ + this.count -= i; + if (clearItems){ + int index = this.takeIndex; + int j = i; + for (; j > 0; j --){ + this.items[index] = null; + if (++index == items.length){ + index = 0; + } + } + //At last index equals take + } + this.takeIndex = take; + for (; i > 0 && lock.hasWaiters(notFull); i--){ + notFull.signal(); + } + } + // Inserts element at current put position, advances, and signals. Call only when holding lock. + private void enqueue(T element){ + this.items[putIndex] = element; + if (++putIndex >= items.length){ + putIndex = 0; + } + count ++; + notEmpty.signal(); + } + + // Extracts element at current take position, advances, and signals. Call only when holding lock. 
+    private T dequeue(){
+        @SuppressWarnings("unchecked")
+        T element = (T) this.items[takeIndex];
+        this.items[takeIndex] = null;
+        if (++ takeIndex == items.length){
+            this.takeIndex = 0;
+        }
+        count --;
+        // Not need to support iterator
+        notFull.signal();
+        return element;
+    }
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/RpcLogSender.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/RpcLogSender.java
new file mode 100644
index 000000000..8254f0a34
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/RpcLogSender.java
@@ -0,0 +1,39 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.ExceptionListener;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.cache.LogCache;
+import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement;
+
+/**
+ * Rpc Log sender
+ * @param <T> log element type
+ */
+public interface RpcLogSender<T extends LogElement> {
+
+    /**
+     * Produce log cache
+     * @return log cache
+     */
+    LogCache<T> getOrCreateLogCache();
+
+    /**
+     * Send log (async)
+     * @param log log element
+     */
+    void sendLog(T log);
+
+    /**
+     * Send log (sync)
+     * @param log log element
+     */
+    void syncSendLog(T log);
+
+    /**
+     * Exception listener
+     * @param listener listener
+     */
+    void setExceptionListener(ExceptionListener listener);
+    /**
+     * Close sender
+     */
+    void close();
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogCache.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogCache.java
new file mode 100644
index 000000000..200c573d9
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogCache.java
@@ -0,0 +1,20 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.cache.LogCache;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf.SendBuffer;
+import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement;
+
+/**
+ * Send log cache
+ * @param <T> log element type
+ */
+public interface SendLogCache<T extends LogElement> extends LogCache<T> {
+
+    /**
+     * Drain the logs into send buffer
+     * @param sendBuffer send buffer
+     * @param maxElements max element size
+     * @return count
+     */
+    int drainLogsTo(SendBuffer<T> sendBuffer, int maxElements);
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogCacheConsumer.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogCacheConsumer.java
new file mode 100644
index 000000000..fac98b90a
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogCacheConsumer.java
@@ -0,0 +1,128 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig;
+import
com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf.SendBuffer;
+import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement;
+
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Send log consumer
+ * Consume the log elements from cache and put into send buffer
+ * @param <T> log element type
+ */
+public abstract class SendLogCacheConsumer<T extends LogElement> implements Runnable {
+
+    // volatile: written by shutdown() from another thread, read in the run() loop;
+    // without it the consumer thread may never observe termination
+    private volatile boolean isTerminated = false;
+
+    /**
+     * Buffer expire time in milliseconds
+     */
+    private final long bufferExpireTimeInMills;
+    /**
+     * Send log cache
+     */
+    private final SendLogCache<T> cache;
+
+    /**
+     * Send buffer
+     */
+    private final SendBuffer<T> sendBuffer;
+
+    private final String id;
+
+    /**
+     * Future for execution
+     */
+    private Future<?> future;
+
+    public SendLogCacheConsumer(String id, SendLogCache<T> cache,
+                                SendBuffer<T> sendBuffer,
+                                RpcLogSenderConfig rpcSenderConfig){
+        this.id = id;
+        this.cache = cache;
+        this.sendBuffer = sendBuffer;
+        long expireTimeInSec = rpcSenderConfig.getBufferConfig().getExpireTimeInSec();
+        // -1 means "no flush deadline configured"
+        this.bufferExpireTimeInMills = expireTimeInSec > 0 ? TimeUnit.SECONDS.toMillis(expireTimeInSec) : -1;
+    }
+
+    @Override
+    public void run() {
+        int remain;
+        long expireTimeInMills = requireNewFlushTime();
+        int capacity = sendBuffer.capacity();
+        while (!this.isTerminated) {
+            try {
+                remain = this.sendBuffer.remaining();
+                if ((expireTimeInMills > 0 && expireTimeInMills <= System.currentTimeMillis()) || remain <= 0) {
+                    // Transient to the read mode
+                    if (remain < capacity) {
+                        sendBuffer.flip();
+                        onFlushAndSend(sendBuffer);
+                    }
+                    expireTimeInMills = requireNewFlushTime();
+                    if (sendBuffer.isReadMode()) {
+                        // Clear the buffer and transient to the write mode, otherwise continue writing
+                        sendBuffer.clear();
+                    }
+                    remain = this.sendBuffer.remaining();
+                }
+                if (remain > 0) {
+                    int inBuf = this.cache.drainLogsTo(sendBuffer, remain);
+                    if (inBuf < remain) {
+                        // Means that the cache is empty, take and wait the log element.
+                        // When no flush deadline is configured, block for a bounded time
+                        // instead of busy-spinning with a negative wait
+                        long waitTime = expireTimeInMills > 0 ? expireTimeInMills - System.currentTimeMillis() : 1000L;
+                        if (waitTime > 0) {
+                            T logElement = this.cache.takeLog(waitTime, TimeUnit.MILLISECONDS);
+                            if (null != logElement) {
+                                sendBuffer.writeBuf(logElement);
+                            }
+                        }
+                    }
+                }
+            } catch (Throwable e){
+                if (this.isTerminated && e instanceof InterruptedException){
+                    return;
+                } else {
+                    System.err.println("SendLogCacheConsumer[" + Thread.currentThread().getName() + "] occurred exception [" + e.getLocalizedMessage() + "]");
+                    // For the unknown exception clear the cache
+                    sendBuffer.clear();
+                    expireTimeInMills = requireNewFlushTime();
+                }
+                try {
+                    Thread.sleep(500);
+                } catch (InterruptedException ex) {
+                    // Restore the interrupt status so the outer loop can observe termination
+                    Thread.currentThread().interrupt();
+                }
+            }
+        }
+    }
+
+    public void shutdown(){
+        this.isTerminated = true;
+        if (null != this.future){
+            this.future.cancel(true);
+        }
+    }
+
+    public Future<?> getFuture() {
+        return future;
+    }
+
+    public void setFuture(Future<?> future) {
+        this.future = future;
+    }
+
+    private long requireNewFlushTime(){
+        return bufferExpireTimeInMills > 0 ? 
System.currentTimeMillis() + bufferExpireTimeInMills : -1; + } + /** + * When the buffer is full or reach the idle time, invoke the method + * @param sendBuffer send buffer + */ + protected abstract void onFlushAndSend(SendBuffer sendBuffer); +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogExceptionStrategy.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogExceptionStrategy.java new file mode 100644 index 000000000..d33b7d2e4 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogExceptionStrategy.java @@ -0,0 +1,61 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender; + +import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf.SendBuffer; +import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement; + +import java.util.Objects; +import java.util.concurrent.Callable; + +/** + * Strategy control the action on exception + */ +public abstract class SendLogExceptionStrategy { + + protected final RpcLogSender sender; + + public SendLogExceptionStrategy(RpcLogSender sender){ + this.sender = sender; + } + /** + * Retry count + * @return retry + */ + public abstract int retryCount(); + + /** + * + * @param e exception + * @return boolean + */ + public abstract RetryDescription onException(Exception e, SendBuffer sendBuffer); + + V doSend(Callable sendOperation, SendBuffer sendBuffer){ + int retryCount = retryCount(); + int count = 0; + RetryDescription retryDescription; + while (++count <= retryCount) { + try { + return sendOperation.call(); + } catch (Exception e) { + retryDescription = onException(e, sendBuffer); 
+ if (Objects.isNull(retryDescription) || !retryDescription.canRetry) { + break; + } + } + } + return null; + } + + protected static class RetryDescription{ + + private final boolean canRetry; + + public RetryDescription(boolean canRetry){ + this.canRetry = canRetry; + } + + public boolean isCanRetry() { + return canRetry; + } + } +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/StreamisRpcLogSender.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/StreamisRpcLogSender.java new file mode 100644 index 000000000..d019c29f5 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/StreamisRpcLogSender.java @@ -0,0 +1,45 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender; + + +import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf.SendBuffer; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.http.AbstractHttpLogSender; +import com.webank.wedatasphere.streamis.jobmanager.log.entities.StreamisLogEvent; +import com.webank.wedatasphere.streamis.jobmanager.log.entities.StreamisLogEvents; + +/** + * Log sender for streamis + */ +public class StreamisRpcLogSender extends AbstractHttpLogSender { + + /** + * Each sender register an application + */ + private final String applicationName; + + public StreamisRpcLogSender(String applicationName, RpcLogSenderConfig rpcSenderConfig) { + super(rpcSenderConfig); + this.applicationName = applicationName; + } + + /** + * Aggregate to streamis log events + * @param sendBuffer send buffer + * @return + 
*/ + @Override + protected StreamisLogEvents aggregateBuffer(SendBuffer sendBuffer) { + int remain = sendBuffer.remaining(); + if (remain > 0) { + StreamisLogEvent[] logEvents = new StreamisLogEvent[remain]; + sendBuffer.readBuf(logEvents, 0, logEvents.length); + return new StreamisLogEvents(applicationName, logEvents); + } + return null; + } + + @Override + protected String convertToJsonString(StreamisLogEvents aggregatedEntity) { + return aggregatedEntity.toJson(); + } +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/AbstractSendBuffer.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/AbstractSendBuffer.java new file mode 100644 index 000000000..1b42ad957 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/AbstractSendBuffer.java @@ -0,0 +1,135 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf; + +/** + * Abstract sender buffer; + * non-blocking and reduces out-of-bounds exceptions + */ +public abstract class AbstractSendBuffer implements SendBuffer{ + + protected enum Flag{ + WRITE_MODE, READ_MODE + } + + /** + * Access flag + */ + private Flag accessFlag = Flag.WRITE_MODE; + + private int position = 0; + private int limit; + /** + * The capacity is mutable + */ + protected int capacity; + + + public AbstractSendBuffer(int capacity){ + this.capacity = capacity; + limit(this.capacity); + } + + public AbstractSendBuffer(){ + this(Integer.MAX_VALUE); + } + + @Override + public boolean isReadMode() { + return accessFlag == Flag.READ_MODE; + } + + @Override + public boolean isWriteMode() { + return accessFlag == Flag.WRITE_MODE; + } + + 
@Override + public int capacity() { + return this.capacity; + } + + @Override + public int remaining() { + int rem = this.limit - this.position; + return Math.max(rem, 0); + } + + @Override + public void flip() { + checkFlag(Flag.WRITE_MODE); + this.limit = this.position; + this.position = 0; + this.accessFlag = Flag.READ_MODE; + } + + @Override + public void rewind() { + position = 0; + } + + @Override + public void clear() { + limit(this.capacity); + this.position = 0; + this.accessFlag = Flag.WRITE_MODE; + clearBuf(); + } + + /** + * Change the limit value + * @param newLimit new limit + */ + final void limit(int newLimit){ + if (newLimit > this.capacity || (newLimit < 0)){ + throw new IllegalArgumentException("Set the illegal limit value: " + newLimit + " in send buffer, [capacity: " + this.capacity + "]"); + } + this.limit = newLimit; + if (this.position > newLimit){ + this.position = newLimit; + } + } + + /** + * Inc the position with offset + * @param offset offset value + * @param accessFlag access flag + * @return the current position value + */ + final int nextPosition(int offset, Flag accessFlag){ + checkFlag(accessFlag); + int p = position; + // Reach the limit, return -1 value + if (p >= limit){ + return -1; + } + if (p + offset > limit){ + this.position = limit; + } else { + this.position = p + offset; + } + return p; + } + + final void checkFlag(Flag accessFlag){ + if (this.accessFlag != accessFlag){ + throw new IllegalStateException("Illegal access flag [" + accessFlag + "] for send buffer"); + } + } + final void setFlag(Flag accessFlag){ + this.accessFlag = accessFlag; + } + /** + * + * @return the current position + */ + final int position(){ + return this.position; + } + + final void position(int position){ + this.position = position; + } + /** + * Do the actual clear + */ + protected abstract void clearBuf(); +} diff --git 
a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/ImmutableSendBuffer.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/ImmutableSendBuffer.java
new file mode 100644
index 000000000..0e64c4ffa
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/ImmutableSendBuffer.java
@@ -0,0 +1,102 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf;
+
+import java.util.Arrays;
+import java.util.function.Function;
+
+/**
+ * Immutable send buffer (use array)
+ * @param <E> element type
+ */
+public class ImmutableSendBuffer<E> extends AbstractSendBuffer<E> {
+
+    /**
+     * Buffer object array
+     */
+    private final Object[] buf;
+
+    public ImmutableSendBuffer(int capacity) {
+        super(capacity);
+        buf = new Object[capacity];
+    }
+
+    @Override
+    protected void clearBuf() {
+        // Release the memory occupied
+        Arrays.fill(buf, null);
+    }
+
+    @Override
+    public void capacity(String newCapacity) {
+        // Resizing a fixed-size array buffer is deliberately unsupported;
+        // UnsupportedOperationException is the idiomatic type for this case
+        throw new UnsupportedOperationException("Unsupported to scale-in/scale-up the send buffer");
+    }
+
+    @Override
+    @SuppressWarnings("all")
+    public int writeBuf(Object[] elements, int srcIndex, int length) {
+        if (srcIndex < elements.length){
+            int startPos = nextPosition(Math.min(elements.length - srcIndex, length), Flag.WRITE_MODE);
+            if (startPos >= 0){
+                int writes = position() - startPos;
+                System.arraycopy(elements, srcIndex, this.buf, startPos, writes);
+                return writes;
+            }
+        }
+        return -1;
+    }
+
+    @Override
+    @SuppressWarnings("all")
+    public int readBuf(Object[] elements, int srcIndex, int length) {
+        if (srcIndex < elements.length){
+            int startPos = nextPosition(Math.min(elements.length - srcIndex, length), Flag.READ_MODE);
+            if (startPos >= 0){
+                int reads = position() - startPos;
+                System.arraycopy(this.buf, startPos, elements, srcIndex, reads);
+                return reads;
+            }
+        }
+        return -1;
+    }
+
+    @Override
+    public int writeBuf(E element) {
+        int startPos = nextPosition(1, Flag.WRITE_MODE);
+        if (startPos >= 0){
+            buf[startPos] = element;
+            return 1;
+        }
+        return -1;
+    }
+
+    @Override
+    @SuppressWarnings("unchecked")
+    public E readBuf() {
+        int startPos = nextPosition(1, Flag.READ_MODE);
+        if (startPos >= 0){
+            return (E)buf[startPos];
+        }
+        return null;
+    }
+
+    @Override
+    @SuppressWarnings("unchecked")
+    public SendBuffer<E> compact(Function<E, Boolean> dropAble) {
+        checkFlag(Flag.READ_MODE);
+        int offset = 0;
+        int compact = position() - 1;
+        for(int i = position(); i < capacity; i ++){
+            Object element = buf[i];
+            if (dropAble.apply((E)element)){
+                buf[i] = null;
+                offset ++;
+            } else {
+                compact = i - offset;
+                buf[compact] = element;
+            }
+        }
+        position(compact + 1);
+        limit(this.capacity);
+        setFlag(Flag.WRITE_MODE);
+        return this;
+    }
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/SendBuffer.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/SendBuffer.java
new file mode 100644
index 000000000..0a98580fb
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/SendBuffer.java
@@ -0,0 +1,92 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf;
+
+import java.util.function.Function;
+
+/**
+ * Buffer for Rpc sender
+ * @param <E> buffer element
+ */
+public interface SendBuffer<E> {
+
+    /**
+     * Capacity
+     * @return int
+     */
+    int capacity();
+
+    /**
+     * Is read mode
+ 
* @return boolean + */ + boolean isReadMode(); + + /** + * Is write mode + * @return boolean + */ + boolean isWriteMode(); + /** + * Scale-up or scale-in + * @param newCapacity new capacity + */ + void capacity(String newCapacity); + /** + * Remain size + * (remain space for writing or remain elements for reading) + * @return int + */ + int remaining(); + + /** + * Transient between write-mode and read-mode + */ + void flip(); + + /** + * Restart from the beginning of window + */ + void rewind(); + /** + * Clear to reuse the buffer + */ + void clear(); + /** + * Write buffer element + * @param element element + * @return if succeed + */ + int writeBuf(E element); + + /** + * Write buffer element array + * @param elements elements + * @param srcIndex the src index in elements + * @param length the length to read + * @return write num + */ + int writeBuf(Object[] elements, int srcIndex, int length); + + /** + * Read buffer element + * @return element + */ + E readBuf(); + + /** + * Read buffer element array + * @param elements elements + * @param srcIndex the src index in elements + * @param length the length to write + * @return read num + */ + int readBuf(Object[] elements, int srcIndex, int length); + + /** + * Compact the buffer, avoid the useless elements + * @param dropAble drop function + * @return send buffer + */ + SendBuffer compact(Function dropAble); + + +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/AbstractHttpLogSender.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/AbstractHttpLogSender.java new file mode 100644 index 000000000..c693d0152 --- /dev/null +++ 
b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/AbstractHttpLogSender.java @@ -0,0 +1,163 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.http; + +import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcAuthConfig; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.AbstractRpcLogSender; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.SendLogExceptionStrategy; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf.SendBuffer; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.http.request.StringPostAction; +import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement; +import org.apache.http.HttpResponse; +import org.apache.http.client.HttpClient; +import org.apache.http.client.HttpResponseException; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.conn.ConnectTimeoutException; + +import javax.net.ssl.SSLException; +import java.io.*; +import java.net.UnknownHostException; +import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; +import java.util.Optional; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicLong; + +public abstract class AbstractHttpLogSender extends AbstractRpcLogSender { + + /** + * Retry strategy + */ + private final SendLogExceptionStrategy sendRetryStrategy; + + /** + * Exception counter + */ + private final AtomicInteger exceptionCounter = new AtomicInteger(); + /** + * Hold the global http client + */ + private final HttpClient globalHttpClient; + + /** + * Recover time point + */ + private final AtomicLong serverRecoveryTimePoint = new 
AtomicLong(-1L); + + public AbstractHttpLogSender(RpcLogSenderConfig rpcSenderConfig) { + super(rpcSenderConfig); + this.globalHttpClient = HttpClientTool.createHttpClient(rpcSenderConfig); + this.sendRetryStrategy = new SendLogExceptionStrategy(this) { + + private final Class[] retryOnExceptions = new Class[]{ + InterruptedIOException.class, UnknownHostException.class, + ConnectTimeoutException.class, SSLException.class}; + @Override + public int retryCount() { + return rpcSenderConfig.getSendRetryCnt(); + } + + @Override + public SendLogExceptionStrategy.RetryDescription onException(Exception e, SendBuffer sendBuffer) { + boolean shouldRetry = false; + // Limit of exception number is the same as the retry times + if (exceptionCounter.incrementAndGet() > retryCount()){ + serverRecoveryTimePoint.set(System.currentTimeMillis() + + TimeUnit.SECONDS.toMillis(rpcSenderConfig.getServerRecoveryTimeInSec())); + } else { + for (Class retryOnException : retryOnExceptions) { + if (retryOnException.equals(e.getClass())) { + shouldRetry = true; + break; + } + } + if (!shouldRetry && e instanceof HttpResponseException){ + if (((HttpResponseException) e).getStatusCode() < 500){ + shouldRetry = true; + } + } + } + if (shouldRetry && !sender.getOrCreateLogCache().isCacheable()){ + // Means that the cache is full + // Set the position of buffer to 0 + sendBuffer.rewind(); + // Compact the buffer and transient to write mode; + sendBuffer.compact( element -> element.mark() > 1); + shouldRetry = false; + } + Optional.ofNullable(exceptionListener).ifPresent(listener -> listener.onException(sender, e, null)); + return new RetryDescription(shouldRetry); + } + }; + } + + @Override + protected SendLogExceptionStrategy getSendLogExceptionStrategy() { + return this.sendRetryStrategy; + } + + @Override + protected void doSend(E aggregatedEntity, RpcLogSenderConfig rpcSenderConfig) throws IOException { + if (System.currentTimeMillis() >= serverRecoveryTimePoint.get()) { + if (aggregatedEntity 
instanceof LogElement) { + long timestamp = ((LogElement) aggregatedEntity).getLogTimeStamp(); + if (System.currentTimeMillis() - timestamp > rpcSenderConfig.getMaxDelayTimeInSec() * 1000L) { + // Abort the entity + return; + } + } + String address = rpcSenderConfig.getAddress(); + if (null != address && !address.trim().equals("")) { + StringPostAction postAction = new StringPostAction(rpcSenderConfig.getAddress(), convertToJsonString(aggregatedEntity)); + RpcAuthConfig authConfig = rpcSenderConfig.getAuthConfig(); + postAction.getRequestHeaders().put(authConfig.getTokenUserKey(), authConfig.getTokenUser()); + HttpResponse response = null; + try { + response = postAction.execute(this.globalHttpClient); + int statusCode = response.getStatusLine().getStatusCode(); + if (statusCode > 200){ + throw new HttpResponseException(statusCode, + convertToString(response.getEntity().getContent(), StandardCharsets.UTF_8)); + } + }finally { + // Close the response and release the conn + if (null != response){ + if (response instanceof CloseableHttpResponse){ + ((CloseableHttpResponse)response).close(); + } else { + // Destroy the stream + response.getEntity().getContent().close(); + } + } + } + // Init the counter + this.exceptionCounter.set(0); + } + } + } + + /** + * Convert input to string + * @param inputStream input stream + * @param charset charset + * @return string value + * @throws IOException + */ + private String convertToString(InputStream inputStream, Charset charset) throws IOException { + StringBuilder builder = new StringBuilder(); + try(BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, charset))){ + String line; + while((line = reader.readLine()) != null){ + builder.append(line); + } + } + return builder.toString(); + } + + /** + * Convert the entity to json + * @param aggregatedEntity aggregated entity + * @return json string + */ + protected abstract String convertToJsonString(E aggregatedEntity); +} diff --git 
a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/HttpClientTool.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/HttpClientTool.java new file mode 100644 index 000000000..e67bec6da --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/HttpClientTool.java @@ -0,0 +1,72 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.http; + +import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig; +import org.apache.http.Header; +import org.apache.http.client.HttpClient; +import org.apache.http.client.config.RequestConfig; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClientBuilder; +import org.apache.http.impl.client.HttpClients; +import org.apache.http.message.BasicHeader; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +/** + * Http clients + */ +public class HttpClientTool { + + /** + * Connect timeout + */ + public static final int DEFAULT_CONNECT_TIMEOUT = 3000; + + /** + * Socket timeout + */ + public static final int DEFAULT_SOCKET_TIMEOUT = 15000; + + /** + * Max connections + */ + public static final int DEFAULT_MAX_CONN = 10; + + /** + * Create http client + * @param rpcSenderConfig rpc sender config + * @return http client + */ + public static HttpClient createHttpClient(RpcLogSenderConfig rpcSenderConfig){ + int connectTimeout = rpcSenderConfig.getConnectionTimeout() > 0? rpcSenderConfig.getConnectionTimeout() : DEFAULT_CONNECT_TIMEOUT; + int socketTimeout = rpcSenderConfig.getSocketTimeout() > 0? 
rpcSenderConfig.getSocketTimeout() : DEFAULT_SOCKET_TIMEOUT; + RequestConfig requestConfig = RequestConfig.custom() + .setConnectTimeout(connectTimeout) + .setConnectionRequestTimeout(socketTimeout) + .setSocketTimeout(socketTimeout) + .build(); + int maxConsumeThread = rpcSenderConfig.getCacheConfig().getMaxConsumeThread(); + int maxConn = maxConsumeThread > 0? maxConsumeThread : DEFAULT_MAX_CONN; + HttpClientBuilder clientBuilder = HttpClients.custom(); + String tokenValue = rpcSenderConfig.getAuthConfig().getTokenCode(); + List
defaultHeaders = new ArrayList<>(); + if (null != tokenValue && !tokenValue.trim().equals("")){ + defaultHeaders.add(new BasicHeader(rpcSenderConfig.getAuthConfig().getTokenCodeKey(), tokenValue)); + } + clientBuilder.setDefaultRequestConfig(requestConfig).setDefaultHeaders(defaultHeaders) + .useSystemProperties().setMaxConnTotal(maxConn); + CloseableHttpClient httpClient = clientBuilder.build(); + Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() { + @Override + public void run() { + try { + httpClient.close(); + } catch (IOException e) { + // Ignore + } + } + })); + return httpClient; + } +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/entities/Resource.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/entities/Resource.java new file mode 100644 index 000000000..eaa355e92 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/entities/Resource.java @@ -0,0 +1,17 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.http.entities; + +import java.io.File; +import java.util.List; + +/** + * Entity with resources + */ +public interface Resource { + + /** + * Resources related + * @return file list + */ + List getResources(); + +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/request/AbstractHttpAction.java 
b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/request/AbstractHttpAction.java new file mode 100644 index 000000000..143f72b8e --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/request/AbstractHttpAction.java @@ -0,0 +1,63 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.http.request; + +import org.apache.http.HttpResponse; +import org.apache.http.client.HttpClient; +import org.apache.http.client.methods.HttpRequestBase; + +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.HashMap; +import java.util.Map; + +/** + * Abstract implement + * @param + */ +public abstract class AbstractHttpAction implements HttpAction { + + protected String uri; + + protected String user; + + public AbstractHttpAction(String uri){ + this.uri = uri; + } + + @Override + public String uri() { + return uri; + } + + /** + * Request method + * @return method + */ + protected abstract T getRequestMethod(); + + private Map requestHeaders = new HashMap<>(); + + private Map requestPayload = new HashMap<>(); + + @Override + public Map getRequestHeaders() { + return this.requestHeaders; + } + + @Override + public Map getRequestPayload() { + return this.requestPayload; + } + + @Override + public HttpResponse execute(HttpClient httpClient) throws IOException { + HttpRequestBase requestBase = getRequestMethod(); + try{ + requestBase.setURI(new URI(uri)); + } catch (URISyntaxException e) { + throw new IllegalArgumentException("URI maybe has wrong format", e); + } + requestHeaders.forEach(requestBase::setHeader); + return httpClient.execute(requestBase); + } +} diff --git 
a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/request/GetAction.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/request/GetAction.java new file mode 100644 index 000000000..f5a8a5fef --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/request/GetAction.java @@ -0,0 +1,4 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.http.request; + +public class GetAction { +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/request/HttpAction.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/request/HttpAction.java new file mode 100644 index 000000000..87435f8a3 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/request/HttpAction.java @@ -0,0 +1,38 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.http.request; + +import org.apache.http.HttpResponse; +import org.apache.http.client.HttpClient; + +import java.io.IOException; +import java.util.Map; + +/** + * Http action + */ +public interface HttpAction { + + /** + * URI path + * @return path + */ + String uri(); + + /** + * Request headers + * @return map + */ + Map getRequestHeaders(); + + /** + * Request pay load(body) + * @return map + */ + Map getRequestPayload(); + + /** + * Execute 
http action + * @return http response + */ + HttpResponse execute(HttpClient httpClient) throws IOException; + +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/request/StringPostAction.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/request/StringPostAction.java new file mode 100644 index 000000000..6ce0d8cdf --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/request/StringPostAction.java @@ -0,0 +1,29 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.http.request; + +import org.apache.http.client.methods.HttpPost; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; + +/** + * Use string to request + */ +public class StringPostAction extends AbstractHttpAction { + + /** + * Raw string value + */ + private final String rawString; + public StringPostAction(String uri, String rawString) { + super(uri); + this.rawString = rawString; + } + + @Override + protected HttpPost getRequestMethod() { + HttpPost httpPost = new HttpPost(); + StringEntity stringEntity = new StringEntity(rawString, "UTF-8"); + stringEntity.setContentType(ContentType.APPLICATION_JSON.toString()); + httpPost.setEntity(stringEntity); + return httpPost; + } +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/plugin/StreamisConfigAutowired.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/plugin/StreamisConfigAutowired.java new file 
mode 100644 index 000000000..980a6aec5 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/plugin/StreamisConfigAutowired.java @@ -0,0 +1,15 @@ +package com.webank.wedatasphere.streamis.jobmanager.plugin; + +import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.StreamisLogAppenderConfig; + +/** + * Streamis config autowired + */ +public interface StreamisConfigAutowired { + + /** + * Log appender config + * @param builder builder + */ + StreamisLogAppenderConfig logAppenderConfig(StreamisLogAppenderConfig.Builder builder) throws Exception; +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/pom.xml b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/pom.xml new file mode 100644 index 000000000..55fc60818 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/pom.xml @@ -0,0 +1,64 @@ + + + + streamis-job-log + com.webank.wedatasphere.streamis + 0.2.0 + ../../pom.xml + + 4.0.0 + + streamis-job-log-collector + + + 8 + 8 + 2.17.1 + 1.7.15 + + + + + com.webank.wedatasphere.streamis + streamis-job-log-collector-core + ${streamis.version} + + + + org.slf4j + slf4j-api + ${slf4j.version} + provided + + + + org.apache.logging.log4j + log4j-slf4j-impl + ${log4j.version} + provided + + + + org.apache.logging.log4j + log4j-api + ${log4j.version} + provided + + + + org.apache.logging.log4j + log4j-core + ${log4j.version} + provided + + + + junit + junit + ${junit.version} + test + + + \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisRpcLogAppender.java 
b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisRpcLogAppender.java new file mode 100644 index 000000000..c4674106f --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisRpcLogAppender.java @@ -0,0 +1,106 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector; + +import com.webank.wedatasphere.streamis.jobmanager.log.collector.cache.LogCache; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.StreamisLogAppenderConfig; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j2.StreamisLog4j2AppenderConfig; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.StreamisRpcLogSender; +import com.webank.wedatasphere.streamis.jobmanager.log.entities.StreamisLogEvent; +import com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired; +import org.apache.logging.log4j.core.Filter; +import org.apache.logging.log4j.core.Layout; +import org.apache.logging.log4j.core.LogEvent; +import org.apache.logging.log4j.core.appender.AbstractAppender; +import org.apache.logging.log4j.core.config.Property; +import org.apache.logging.log4j.core.config.plugins.Plugin; +import org.apache.logging.log4j.core.config.plugins.PluginAttribute; +import org.apache.logging.log4j.core.config.plugins.PluginElement; +import org.apache.logging.log4j.core.config.plugins.PluginFactory; +import org.apache.logging.log4j.core.layout.PatternLayout; + +import java.io.Serializable; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import java.util.ServiceLoader; + +/** + * Streamis rpc log appender + */ +@Plugin(name = "StreamRpcLog", category = "Core", elementType 
= "appender", printObject = true) +public class StreamisRpcLogAppender extends AbstractAppender { + private static final String DEFAULT_APPENDER_NAME = "StreamRpcLog"; + /** + * Appender config + */ + private final StreamisLogAppenderConfig appenderConfig; + + /** + * Rpc log sender + */ + private final StreamisRpcLogSender rpcLogSender; + + /** + * Cache + */ + private final LogCache logCache; + protected StreamisRpcLogAppender(String name, Filter filter, + Layout layout, + boolean ignoreExceptions, Property[] properties, + StreamisLogAppenderConfig appenderConfig) { + super(name, filter, layout, ignoreExceptions, properties); + this.appenderConfig = appenderConfig; + this.rpcLogSender = new StreamisRpcLogSender(this.appenderConfig.getApplicationName(), + this.appenderConfig.getSenderConfig()); + this.rpcLogSender.setExceptionListener((subject, t, message) -> + LOGGER.error((null != subject? subject.getClass().getSimpleName() : "") + ": " + message, t)); + this.logCache = this.rpcLogSender.getOrCreateLogCache(); + Runtime.getRuntime().addShutdownHook(new Thread(this.rpcLogSender::close)); + } + + @Override + public void append(LogEvent event) { + String content = new String(getLayout().toByteArray(event)); + // Transform to stream log event; + StreamisLogEvent logEvent = new StreamisLogEvent(content, System.currentTimeMillis()); + try { + this.logCache.cacheLog(logEvent); + } catch (InterruptedException e) { + LOGGER.error("StreamisRpcLogAppender: {} interrupted when cache the log into the RPC sender, message: {}", this.getName(), e.getMessage()); + } + } + + @PluginFactory + public static StreamisRpcLogAppender createAppender(@PluginAttribute("name") String name, + @PluginAttribute("appName") String applicationName, + @PluginAttribute("ignoreExceptions") boolean ignoreExceptions, + @PluginElement("Filter") final Filter filter, + @PluginElement("Layout") Layout layout, + @PluginElement("RpcLogSender")RpcLogSenderConfig rpcLogSenderConfig) throws Exception{ + if 
(null == name || name.trim().equals("")){ + name = DEFAULT_APPENDER_NAME; + } + if (Objects.isNull(layout)){ + layout = PatternLayout.createDefaultLayout(); + } + // Search the config autowired class + List configAutowiredEntities = new ArrayList<>(); + StreamisLog4j2AppenderConfig logAppenderConfig = null; + ServiceLoader.load(StreamisConfigAutowired.class, + StreamisRpcLogAppender.class.getClassLoader()).iterator().forEachRemaining(configAutowiredEntities::add); + StreamisLog4j2AppenderConfig.Builder builder = new StreamisLog4j2AppenderConfig.Builder(applicationName, filter, rpcLogSenderConfig); + for (StreamisConfigAutowired autowired : configAutowiredEntities){ + logAppenderConfig = (StreamisLog4j2AppenderConfig) autowired.logAppenderConfig(builder); + } + if (Objects.isNull(logAppenderConfig)){ + logAppenderConfig = builder.build(); + } + applicationName = logAppenderConfig.getApplicationName(); + if (null == applicationName || applicationName.trim().equals("")){ + throw new IllegalArgumentException("Application name cannot be empty"); + } + System.out.println("StreamisRpcLogAppender: init with config => " + logAppenderConfig); + return new StreamisRpcLogAppender(name, logAppenderConfig.getFilter(), layout, ignoreExceptions, Property.EMPTY_ARRAY, logAppenderConfig); + } + +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/StreamisLog4j2AppenderConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/StreamisLog4j2AppenderConfig.java new file mode 100644 index 000000000..f68d77210 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/StreamisLog4j2AppenderConfig.java @@ -0,0 +1,88 @@ +package 
com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j2; + +import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.StreamisLogAppenderConfig; +import org.apache.logging.log4j.core.Filter; +import org.apache.logging.log4j.core.filter.CompositeFilter; + +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; + +/** + * Appender config for log4j2 + */ +public class StreamisLog4j2AppenderConfig extends StreamisLogAppenderConfig { + /** + * Filter in log4j2 + */ + private final Filter filter; + + public StreamisLog4j2AppenderConfig(String applicationName, Filter filter, + RpcLogSenderConfig rpcLogSenderConfig){ + super(applicationName, rpcLogSenderConfig); + this.filter = filter; + } + + public static class Builder extends StreamisLogAppenderConfig.Builder { + + /** + * Filter rules + */ + private final List filters = new ArrayList<>(); + + public Builder(String applicationName, Filter filter, RpcLogSenderConfig rpcLogSenderConfig) { + super(applicationName, rpcLogSenderConfig); + if (Objects.nonNull(filter)) { + this.filters.add(filter); + } + } + + /** + * Set filter + * @param filter filter + * @return builder + */ + public StreamisLog4j2AppenderConfig.Builder setFilter(Filter filter){ + this.filters.clear(); + this.filters.add(filter); + return this; + } + + /** + * Append filter + * @param filter filter + * @return builder + */ + public StreamisLog4j2AppenderConfig.Builder withFilter(Filter filter){ + filters.add(filter); + return this; + } + + /** + * Build method + * @return config + */ + public StreamisLog4j2AppenderConfig build(){ + Filter logFilter = null; + if (filters.size() > 1){ + logFilter = CompositeFilter.createFilters(filters.toArray(new Filter[0])); + } else if (!filters.isEmpty()){ + logFilter = filters.get(0); + } + return new StreamisLog4j2AppenderConfig(applicationName, logFilter, rpcLogSenderConfig); + } + 
} + public Filter getFilter() { + return filter; + } + + @Override + public String toString() { + return "StreamisLog4j2AppenderConfig{" + + "applicationName='" + applicationName + '\'' + + ", senderConfig=" + senderConfig + + ", filter=" + filter + + '}'; + } +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/RpcAuthConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/RpcAuthConfig.java new file mode 100644 index 000000000..87a10ba85 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/RpcAuthConfig.java @@ -0,0 +1,31 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j2.config; + +import org.apache.logging.log4j.core.config.plugins.Plugin; +import org.apache.logging.log4j.core.config.plugins.PluginAttribute; +import org.apache.logging.log4j.core.config.plugins.PluginFactory; + +/** + * AuthConfig Element in log4j2 + */ +@Plugin( + name = "AuthConfig", + category = "Core", + printObject = true +) +public class RpcAuthConfig extends com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcAuthConfig { + + public RpcAuthConfig(){ + super(); + } + public RpcAuthConfig(String tokenCodeKey, String tokenCode, String tokenUserKey, String tokenUser) { + super(tokenCodeKey, tokenCode, tokenUserKey, tokenUser); + } + + @PluginFactory + public static RpcAuthConfig createRpcAuthConfig(@PluginAttribute("tokenCodeKey") String tokenCodeKey, + @PluginAttribute("tokenCode") String tokenCode, + @PluginAttribute("tokenUserKey") String tokenUserKey, @PluginAttribute("tokenUser") String tokenUser){ + return new RpcAuthConfig(tokenCodeKey, tokenCode, 
tokenUserKey, tokenUser); + } + +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/RpcLogSenderConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/RpcLogSenderConfig.java new file mode 100644 index 000000000..7be526465 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/RpcLogSenderConfig.java @@ -0,0 +1,37 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j2.config; + +import org.apache.logging.log4j.core.config.plugins.Plugin; +import org.apache.logging.log4j.core.config.plugins.PluginAttribute; +import org.apache.logging.log4j.core.config.plugins.PluginElement; +import org.apache.logging.log4j.core.config.plugins.PluginFactory; +import org.apache.logging.log4j.core.util.Integers; + +/** + * Rpc sender configuration + */ +@Plugin( + name = "RpcLogSender", + category = "Core", + printObject = true +) +public class RpcLogSenderConfig extends com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig { + + public RpcLogSenderConfig(String address, int sendRetryCnt, int connectionTimeout, int socketTimeout, int serverRecoveryTimeInSec, int maxDelayTimeInSec, + RpcAuthConfig authConfig, SendLogCacheConfig cacheConfig, SendBufferConfig bufferConfig) { + super(address, sendRetryCnt, connectionTimeout, socketTimeout, serverRecoveryTimeInSec, maxDelayTimeInSec, authConfig, cacheConfig, bufferConfig); + } + + @PluginFactory + public static RpcLogSenderConfig createConfig( + @PluginAttribute("address") String address, @PluginAttribute("sendRetryCnt") String sendRetryCnt, + @PluginAttribute("connectionTimeout") String 
connectionTimeout, @PluginAttribute("socketTimeout") String socketTimeout, + @PluginAttribute("serverRecoveryTimeInSec") String serverRecoveryTimeInSec, @PluginAttribute("maxDelayTimeInSec") String maxDelayTimeInSec, + @PluginElement("AuthConfig")RpcAuthConfig authConfig, @PluginElement("SendLogCache") SendLogCacheConfig cacheConfig, + @PluginElement("SendBuffer")SendBufferConfig bufferConfig){ + return new RpcLogSenderConfig(address, Integers.parseInt(sendRetryCnt, 3), + Integers.parseInt(connectionTimeout, 3000), Integers.parseInt(socketTimeout, 15000), + Integers.parseInt(serverRecoveryTimeInSec, 5), Integers.parseInt(maxDelayTimeInSec, 60), + authConfig, cacheConfig, bufferConfig); + } + +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/SendBufferConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/SendBufferConfig.java new file mode 100644 index 000000000..936accd72 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/SendBufferConfig.java @@ -0,0 +1,28 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j2.config; + +import org.apache.logging.log4j.core.config.plugins.Plugin; +import org.apache.logging.log4j.core.config.plugins.PluginAttribute; +import org.apache.logging.log4j.core.config.plugins.PluginFactory; +import org.apache.logging.log4j.core.util.Integers; + +@Plugin( + name = "SendBuffer", + category = "Core", + printObject = true +) +public class SendBufferConfig extends com.webank.wedatasphere.streamis.jobmanager.log.collector.config.SendBufferConfig { + + public SendBufferConfig() { + } + + public SendBufferConfig(int size, long 
expireTimeInSec) { + super(size, expireTimeInSec); + } + + @PluginFactory + public static SendBufferConfig createBufferConfig( + @PluginAttribute("size") String size, @PluginAttribute("expireTimeInSec") String expireTimeInSec){ + return new SendBufferConfig(Integers.parseInt(size, 50), + Integers.parseInt(expireTimeInSec, 2)); + } +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/SendLogCacheConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/SendLogCacheConfig.java new file mode 100644 index 000000000..f4a63c49c --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/SendLogCacheConfig.java @@ -0,0 +1,27 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j2.config; + +import org.apache.logging.log4j.core.config.plugins.Plugin; +import org.apache.logging.log4j.core.config.plugins.PluginAttribute; +import org.apache.logging.log4j.core.config.plugins.PluginFactory; +import org.apache.logging.log4j.core.util.Integers; + +/** + * Cache config + */ +@Plugin( + name = "SendLogCache", + category = "Core", + printObject = true +) +public class SendLogCacheConfig extends com.webank.wedatasphere.streamis.jobmanager.log.collector.config.SendLogCacheConfig { + + public SendLogCacheConfig(int size, int maxConsumeThread) { + super(size, maxConsumeThread); + } + + @PluginFactory + public static SendLogCacheConfig createCacheConfig( + @PluginAttribute("size") String size, @PluginAttribute("maxConsumeThread") String maxConsumeThread){ + return new SendLogCacheConfig(Integers.parseInt(size, 150), Integers.parseInt(maxConsumeThread, 10)); + } +} diff --git 
a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisLogAppenderTest.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisLogAppenderTest.java new file mode 100644 index 000000000..2cec0ec46 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisLogAppenderTest.java @@ -0,0 +1,25 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector; + +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class StreamisLogAppenderTest { + private static final Logger LOG = LoggerFactory.getLogger(StreamisLogAppenderTest.class); + @Test + public void appenderLog() throws InterruptedException { + int total = 1000; + int tps = 100; + long timer = System.currentTimeMillis() + 1000; + for(int i = 0; i < total; i ++){ + if (i > 0 && i % tps == 0){ + long sleep = timer - System.currentTimeMillis(); + if (sleep > 0){ + Thread.sleep(sleep); + } + timer = System.currentTimeMillis() + 1000; + } + LOG.info("Stream Log appender test, sequence id: " + i); + } + } +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/test/resources/log4j2.xml b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/test/resources/log4j2.xml new file mode 100644 index 000000000..ee3f4125a --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/test/resources/log4j2.xml @@ -0,0 +1,37 @@ + + + + + + + + + + + + + ` + + + + + + + + \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/pom.xml 
b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/pom.xml new file mode 100644 index 000000000..7632a5718 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/pom.xml @@ -0,0 +1,56 @@ + + + + streamis-job-log + com.webank.wedatasphere.streamis + 0.2.0 + ../../pom.xml + + 4.0.0 + + streamis-job-log-collector1x + + + 8 + 8 + 1.2.17 + 1.7.12 + + + + com.webank.wedatasphere.streamis + streamis-job-log-collector-core + ${streamis.version} + + + + org.slf4j + slf4j-api + ${slf4j.version} + provided + + + + org.slf4j + slf4j-log4j12 + ${slf4j.version} + provided + + + + log4j + log4j + ${log4j.version} + provided + + + + junit + junit + ${junit.version} + test + + + \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisRpcLogAppender.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisRpcLogAppender.java new file mode 100644 index 000000000..ccbaf02fc --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisRpcLogAppender.java @@ -0,0 +1,189 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector; + +import com.webank.wedatasphere.streamis.jobmanager.log.collector.cache.LogCache; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j1.StreamisLog4jAppenderConfig; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.StreamisRpcLogSender; +import com.webank.wedatasphere.streamis.jobmanager.log.entities.StreamisLogEvent; +import 
com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired; +import org.apache.log4j.AppenderSkeleton; +import org.apache.log4j.Level; +import org.apache.log4j.SimpleLayout; +import org.apache.log4j.helpers.LogLog; +import org.apache.log4j.spi.LoggingEvent; + +import java.util.*; + +/** + * Rpc appender for log4j1 + */ +public class StreamisRpcLogAppender extends AppenderSkeleton { + + /** + * Application name + */ + private String applicationName; + + /** + * Appender config + */ + private StreamisLog4jAppenderConfig appenderConfig; + + /** + * Rpc log sender + */ + private StreamisRpcLogSender rpcLogSender; + + /** + * Rpc log sender config + */ + private RpcLogSenderConfig rpcLogSenderConfig = new RpcLogSenderConfig(); + + + /** + * Cache + */ + private LogCache logCache; + + + @Override + protected void append(LoggingEvent loggingEvent) { + String content = super.getLayout().format(loggingEvent); + // Transform to stream log event; + StreamisLogEvent logEvent = new StreamisLogEvent(content, System.currentTimeMillis()); + if (Objects.nonNull(logCache)){ + try { + this.logCache.cacheLog(logEvent); + } catch (InterruptedException e) { + LogLog.error("StreamisRpcLogAppender: " + this.getName() + + " interrupted when cache the log into the RPC sender, message: " + e.getMessage()); + } + } + } + + @Override + public void close() { + if (Objects.nonNull(this.rpcLogSender)){ + this.rpcLogSender.close(); + } + } + + @Override + public boolean requiresLayout() { + return true; + } + + @Override + public void activateOptions() { + if (Objects.nonNull(this.logCache)){ + return; + } + if (Objects.isNull(getLayout())){ + setLayout(new SimpleLayout()); + } + // Search the config autowired class + List configAutowiredEntities = new ArrayList<>(); + StreamisLog4jAppenderConfig logAppenderConfig = null; + ServiceLoader.load(StreamisConfigAutowired.class, + StreamisRpcLogAppender.class.getClassLoader()).iterator().forEachRemaining(configAutowiredEntities::add); + 
StreamisLog4jAppenderConfig.Builder builder = new StreamisLog4jAppenderConfig.Builder(this.applicationName, + getThreshold(), getFilter(), rpcLogSenderConfig); + for (StreamisConfigAutowired autowired : configAutowiredEntities){ + try { + logAppenderConfig = (StreamisLog4jAppenderConfig) autowired.logAppenderConfig(builder); + } catch (Exception e) { + LogLog.warn("Unable to autowired the config from: " +autowired.getClass().getName(), e); + } + } + if (Objects.isNull(logAppenderConfig)){ + logAppenderConfig = builder.build(); + } + this.applicationName = logAppenderConfig.getApplicationName(); + if (null == applicationName || applicationName.trim().equals("")){ + throw new IllegalArgumentException("Application name cannot be empty"); + } + this.appenderConfig = logAppenderConfig; + // Set the threshold to error default + setThreshold(Optional.ofNullable(logAppenderConfig.getThreshold()).orElse(Level.ERROR)); + // First to clear the filters + clearFilters(); + // Then to add filter + logAppenderConfig.getFilters().forEach(this :: addFilter); + System.out.println("StreamisRpcLogAppender: init with config => " + logAppenderConfig); + this.rpcLogSender = new StreamisRpcLogSender(this.appenderConfig.getApplicationName(), + this.appenderConfig.getSenderConfig()); + this.rpcLogSender.setExceptionListener((subject, t, message) -> + LogLog.error((null != subject? 
subject.getClass().getSimpleName() : "") + ": " + message, t)); + this.logCache = this.rpcLogSender.getOrCreateLogCache(); + } + + + public String getAppName() { + return applicationName; + } + + /** + * Application name + * @param applicationName name + */ + public void setAppName(String applicationName) { + this.applicationName = applicationName; + } + + public void setRpcAddress(String address){ + this.rpcLogSenderConfig.setAddress(address); + } + + public void setRpcConnTimeout(int connectionTimeout){ + this.rpcLogSenderConfig.setConnectionTimeout(connectionTimeout); + } + + public void setRpcSocketTimeout(int socketTimeout){ + this.rpcLogSenderConfig.setSocketTimeout(socketTimeout); + } + public void setRpcSendRetryCnt(int sendRetryCnt){ + this.rpcLogSenderConfig.setSendRetryCnt(sendRetryCnt); + } + + public void setRpcServerRecoveryTimeInSec(int serverRecoveryTimeInSec){ + this.rpcLogSenderConfig.setServerRecoveryTimeInSec(serverRecoveryTimeInSec); + } + + public void setRpcMaxDelayTimeInSec(int maxDelayTimeInSec){ + this.rpcLogSenderConfig.setMaxDelayTimeInSec(maxDelayTimeInSec); + } + // Authentication + public void setRpcAuthTokenCodeKey(String tokenCodeKey){ + this.rpcLogSenderConfig.getAuthConfig().setTokenCodeKey(tokenCodeKey); + } + + public void setRpcAuthTokenUserKey(String tokenUserKey){ + this.rpcLogSenderConfig.getAuthConfig().setTokenUserKey(tokenUserKey); + } + + public void setRpcAuthTokenUser(String tokenUser){ + this.rpcLogSenderConfig.getAuthConfig().setTokenUser(tokenUser); + } + + public void setRpcAuthTokenCode(String tokenCode){ + this.rpcLogSenderConfig.getAuthConfig().setTokenCode(tokenCode); + } + + // Cache configuration + public void setRpcCacheSize(int cacheSize){ + this.rpcLogSenderConfig.getCacheConfig().setSize(cacheSize); + } + + public void setRpcCacheMaxConsumeThread(int maxConsumeThread){ + this.rpcLogSenderConfig.getCacheConfig().setMaxConsumeThread(maxConsumeThread); + } + + // Buffer configuration + public void 
setRpcBufferSize(int bufferSize){ + this.rpcLogSenderConfig.getBufferConfig().setSize(bufferSize); + } + + public void setRpcBufferExpireTimeInSec(int expireTimeInSec){ + this.rpcLogSenderConfig.getBufferConfig().setExpireTimeInSec(expireTimeInSec); + } +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j1/StreamisLog4jAppenderConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j1/StreamisLog4jAppenderConfig.java new file mode 100644 index 000000000..84489c155 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j1/StreamisLog4jAppenderConfig.java @@ -0,0 +1,95 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j1; + +import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.StreamisLogAppenderConfig; +import org.apache.log4j.Priority; +import org.apache.log4j.spi.Filter; + +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; + +/** + * Appender config for log4j1 + */ +public class StreamisLog4jAppenderConfig extends StreamisLogAppenderConfig { + + /** + * Filter in log4j1 + */ + private final List filters = new ArrayList<>(); + /** + * + */ + private final Priority threshold; + + protected StreamisLog4jAppenderConfig(String applicationName, Priority threshold, List filters, + RpcLogSenderConfig rpcLogSenderConfig) { + super(applicationName, rpcLogSenderConfig); + this.threshold = threshold; + this.filters.addAll(filters); + } + + public static class Builder extends StreamisLogAppenderConfig.Builder{ + + /** + * Filter rules + */ + private 
final List filters = new ArrayList<>(); + + /** + * Threshold + */ + private Priority threshold; + + public Builder(String applicationName, Priority threshold, Filter filter,RpcLogSenderConfig rpcLogSenderConfig) { + super(applicationName, rpcLogSenderConfig); + this.threshold = threshold; + if (Objects.nonNull(filter)) { + this.filters.add(filter); + } + } + + public StreamisLog4jAppenderConfig.Builder setFilter(Filter filter){ + this.filters.clear(); + this.filters.add(filter); + return this; + } + + public StreamisLog4jAppenderConfig.Builder withFilter(Filter filter){ + filters.add(filter); + return this; + } + + /** + * Set threshold + * @param threshold threshold + * @return builder + */ + public StreamisLog4jAppenderConfig.Builder threshold(Priority threshold){ + this.threshold = threshold; + return this; + } + public StreamisLog4jAppenderConfig build(){ + return new StreamisLog4jAppenderConfig(applicationName, threshold, filters, rpcLogSenderConfig); + } + } + + public List getFilters() { + return filters; + } + + public Priority getThreshold() { + return threshold; + } + + @Override + public String toString() { + return "StreamisLog4jAppenderConfig{" + + "applicationName='" + applicationName + '\'' + + ", senderConfig=" + senderConfig + + ", filters=" + filters + + ", threshold=" + threshold + + '}'; + } +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisLogAppenderTest.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisLogAppenderTest.java new file mode 100644 index 000000000..0dcca02c9 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisLogAppenderTest.java @@ -0,0 +1,27 @@ 
+package com.webank.wedatasphere.streamis.jobmanager.log.collector; + +import org.apache.log4j.PropertyConfigurator; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class StreamisLogAppenderTest { + private static final Logger LOG = LoggerFactory.getLogger(StreamisLogAppenderTest.class); + @Test + public void appenderLog() throws InterruptedException { + PropertyConfigurator.configure(StreamisLogAppenderTest.class.getResource("/log4j.properties").getPath()); + int total = 1000; + int tps = 100; + long timer = System.currentTimeMillis() + 1000; + for(int i = 0; i < total; i ++){ + if (i > 0 && i % tps == 0){ + long sleep = timer - System.currentTimeMillis(); + if (sleep > 0){ + Thread.sleep(sleep); + } + timer = System.currentTimeMillis() + 1000; + } + LOG.info("Stream Log appender test, sequence id: " + i); + } + } +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/test/resources/log4j.properties b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/test/resources/log4j.properties new file mode 100644 index 000000000..85e2a9167 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/test/resources/log4j.properties @@ -0,0 +1,43 @@ +# +# Copyright 2021 WeBank +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +### set log levels ### + +log4j.rootCategory=INFO,stream + +log4j.appender.console=org.apache.log4j.ConsoleAppender +log4j.appender.console.Threshold=INFO +log4j.appender.console.layout=org.apache.log4j.PatternLayout +#log4j.appender.console.layout.ConversionPattern= %d{ISO8601} %-5p (%t) [%F:%M(%L)] - %m%n +log4j.appender.console.layout.ConversionPattern= %d{ISO8601} %-5p (%t) %p %c{1} - %m%n + +log4j.appender.stream=com.webank.wedatasphere.streamis.jobmanager.log.collector.StreamisRpcLogAppender +log4j.appender.stream.appName=stream_applicatioin +log4j.appender.stream.Threshold=INFO +log4j.appender.stream.layout=org.apache.log4j.PatternLayout +log4j.appender.stream.layout.ConversionPattern= %d{ISO8601} %-5p (%t) [%F:%M(%L)] - %m%n +log4j.appender.stream.rpcConnTimeout=3000 +log4j.appender.stream.rpcSocketTimeout=15000 +log4j.appender.stream.rpcSendRetryCnt=3 +log4j.appender.stream.rpcServerRecoveryTimeInSec=5 +log4j.appender.stream.rpcMaxDelayTimeInSec=60 +log4j.appender.stream.rpcAuthTokenCodeKey= +log4j.appender.stream.rpcAuthTokenUserKey= +log4j.appender.stream.rpcAuthTokenUser= +log4j.appender.stream.rpcAuthTokenCode= +log4j.appender.stream.rpcCacheSize=200 +log4j.appender.stream.rpcCacheMaxConsumeThread=1 +log4j.appender.stream.rpcBufferSize=50 +log4j.appender.stream.rpcBufferExpireTimeInSec=2 \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/pom.xml b/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/pom.xml new file mode 100644 index 000000000..904bec4dd --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/pom.xml @@ -0,0 +1,52 @@ + + + + streamis-job-log + com.webank.wedatasphere.streamis + 0.2.0 + ../../pom.xml + + 4.0.0 + + xspark-streamis-log-collector + + + 8 + 8 + + + + + com.webank.wedatasphere.streamis + streamis-job-log-collector1x + ${streamis.version} + + + + + + 
org.apache.maven.plugins + maven-assembly-plugin + 2.3 + + + assemble + + single + + + install + + + + + src/main/assembly/package.xml + + false + + + + + \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/src/main/assembly/package.xml b/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/src/main/assembly/package.xml new file mode 100644 index 000000000..8da27bf2c --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/src/main/assembly/package.xml @@ -0,0 +1,19 @@ + + + package + + + jar + + false + + + / + true + runtime + + + \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/spark/SparkStreamisConfigAutowired.java b/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/spark/SparkStreamisConfigAutowired.java new file mode 100644 index 000000000..a2a73adbe --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/spark/SparkStreamisConfigAutowired.java @@ -0,0 +1,56 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.spark; + +import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.StreamisLogAppenderConfig; +import com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired; + +import java.util.Optional; + +/** + * Autoconfigure the streamis config in Spark environment + */ +public class SparkStreamisConfigAutowired implements StreamisConfigAutowired { + + private static final String APP_NAME_CONFIG = "app.name"; + + private static final String SERVER_ADDRESS_CONFIG = "streamis.url"; + + private 
static final String COLLECTOR_URI_CONFIG = "streamis.log.collector.uri"; + + private static final String PROJECT_NAME_CONFIG = "project.name"; + + private static final String DEFAULT_COLLECTOR_URI = "/api/rest_j/v1/streamis/streamJobManager/log/collect/events"; + @Override + public StreamisLogAppenderConfig logAppenderConfig(StreamisLogAppenderConfig.Builder builder) throws Exception { + // Load the config from system properties + Optional.ofNullable(System.getProperty(APP_NAME_CONFIG)).ifPresent(appName -> { + String projectName = System.getProperty(PROJECT_NAME_CONFIG); + if (null != projectName && !projectName.trim().equals("")){ + appName = projectName + "." + appName; + } + System.out.println("Spark env to streamis: application name =>" + appName); + builder.setAppName(appName); + }); + String serverAddress = System.getProperty(SERVER_ADDRESS_CONFIG); + if (null != serverAddress && !serverAddress.trim().equals("")){ + if (serverAddress.endsWith("/")){ + serverAddress = serverAddress.substring(0, serverAddress.length() - 1); + } + String collectorUri = System.getProperty(COLLECTOR_URI_CONFIG, DEFAULT_COLLECTOR_URI); + if (null != collectorUri && !collectorUri.trim().equals("")){ + if (!collectorUri.startsWith("/")){ + collectorUri = "/" + collectorUri; + } + serverAddress += collectorUri; + } + System.out.println("Spark env to streamis: server address =>" + serverAddress); + builder.setRpcAddress(serverAddress); + } + String user = System.getenv("USER"); + if (null == user || user.trim().equals("")){ + user = System.getProperty("user.name", "hadoop"); + } + System.out.println("Spark env to streamis: log user =>" + user); + builder.setRpcAuthTokenUser(user); + return builder.build(); + } +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/src/main/resources/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired 
b/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/src/main/resources/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired new file mode 100644 index 000000000..dac2fcaed --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/src/main/resources/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired @@ -0,0 +1 @@ +com.webank.wedatasphere.streamis.jobmanager.log.collector.spark.SparkStreamisConfigAutowired \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-log/pom.xml b/streamis-jobmanager/streamis-job-log/pom.xml new file mode 100644 index 000000000..eb79d84a5 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/pom.xml @@ -0,0 +1,29 @@ + + + + streamis-jobmanager + com.webank.wedatasphere.streamis + 0.2.0 + + 4.0.0 + + streamis-job-log + pom + + job-log-collector/streamis-job-log-collector-core + job-log-collector/streamis-job-log-collector + job-log-collector/streamis-job-log-collector1x + job-log-collector/flink-streamis-log-collector + job-log-collector/xspark-streamis-log-collector + streamis-job-log-server + streamis-job-log-common + + + + 8 + 8 + + + \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-common/pom.xml b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/pom.xml new file mode 100644 index 000000000..fcff3592c --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/pom.xml @@ -0,0 +1,29 @@ + + + + streamis-job-log + com.webank.wedatasphere.streamis + 0.2.0 + ../pom.xml + + 4.0.0 + + streamis-job-log-common + + + 8 + 8 + + + + + + + + + + + + \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/entities/LogElement.java 
b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/entities/LogElement.java new file mode 100644 index 000000000..da3a7054b --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/entities/LogElement.java @@ -0,0 +1,34 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.entities; + + +/** + * Element defined of log + */ +public interface LogElement { + + /** + * Sequence id + * @return seq id + */ + int getSequenceId(); + + /** + * Log time + * @return log time + */ + long getLogTimeStamp(); + + /** + * Get content + * @return content array + */ + String[] getContents(); + + /** + * The importance of log + * 0: useless, 1: normal, 2:important + * @return + */ + int mark(); + +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/entities/StreamisLogEvent.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/entities/StreamisLogEvent.java new file mode 100644 index 000000000..6f8645f77 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/entities/StreamisLogEvent.java @@ -0,0 +1,84 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.entities; + + +import com.webank.wedatasphere.streamis.jobmanager.log.json.JsonTool; + +import java.io.Serializable; +import java.util.Objects; + +/** + * Log event for streamis + */ +public class StreamisLogEvent implements LogElement, Serializable { + + /** + * Log time + */ + private long logTimeInMills; + + /** + * Log content + */ + private String content; + + /** + * Mark + */ + private int mark; + + public StreamisLogEvent(){ + + } + public StreamisLogEvent(String content, long logTimeInMills){ + this.content = 
content; + this.logTimeInMills = logTimeInMills; + } + @Override + public int getSequenceId() { + return 0; + } + + @Override + public long getLogTimeStamp() { + return this.logTimeInMills; + } + + @Override + public String[] getContents() { + return new String[]{content}; + } + + public String getContent() { + return content; + } + + @Override + public int mark() { + return this.mark; + } + + public void setLogTimeStamp(long logTimeInMills) { + this.logTimeInMills = logTimeInMills; + } + + public void setContent(String content) { + this.content = content; + } + + public void setMark(int mark) { + this.mark = mark; + } + + public void setSequenceId(int sequenceId){ + // Ignore + } + + public String toJson(){ + return "{" + + "\"logTimeStamp\":" + logTimeInMills + + ",\"content\":" + (Objects.isNull(content)? null : "\"" + JsonTool.escapeStrValue(content) + "\"") + + ",\"sequenceId\":0" + + "}"; + + } +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/entities/StreamisLogEvents.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/entities/StreamisLogEvents.java new file mode 100644 index 000000000..f2843c8af --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/entities/StreamisLogEvents.java @@ -0,0 +1,112 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.entities; + +import com.webank.wedatasphere.streamis.jobmanager.log.json.JsonTool; + +import java.io.Serializable; +import java.util.Objects; + +public class StreamisLogEvents implements LogElement, Serializable { + + /** + * Application name + */ + private String appName; + /** + * Log time + */ + private long logTimeInMills; + + private StreamisLogEvent[] events; + public StreamisLogEvents(){ + + } + public StreamisLogEvents(String applicationName, 
StreamisLogEvent[] events){ + this.appName = applicationName; + this.events = events; + long maxTime = -1; + StreamisLogEvent lastEvent = events[events.length - 1]; + if (null == lastEvent) { + for (StreamisLogEvent event : events) { + long time = event.getLogTimeStamp(); + if (time > maxTime) { + maxTime = time; + } + } + this.logTimeInMills = maxTime; + }else { + this.logTimeInMills = lastEvent.getLogTimeStamp(); + } + + } + + @Override + public int getSequenceId() { + return 0; + } + + @Override + public long getLogTimeStamp() { + return this.logTimeInMills; + } + + + @Override + public String[] getContents() { + String[] contents = new String[events.length]; + for(int i = 0 ; i < contents.length; i++){ + contents[i] = events[i].getContent(); + } + return contents; + } + + @Override + public int mark() { + return 1; + } + + public String getAppName() { + return appName; + } + + public StreamisLogEvent[] getEvents() { + return events; + } + + public void setAppName(String appName) { + this.appName = appName; + } + + public void setLogTimeStamp(long logTimeInMills) { + this.logTimeInMills = logTimeInMills; + } + + public void setEvents(StreamisLogEvent[] events) { + this.events = events; + } + + public void setSequenceId(int sequenceId){ + // Ignore + } + + public String toJson(){ + return "{" + + "\"logTimeStamp\":" + logTimeInMills + + ",\"appName\":" + (Objects.isNull(appName)? null : "\"" + JsonTool.escapeStrValue(appName) + "\"") + + ",\"events\":[" + + (Objects.isNull(events) || events.length <=0 ? 
"" : joinEvents(events, ",") ) + "]" + + ",\"sequenceId\":0" + + "}"; + } + + private String joinEvents(StreamisLogEvent[] events, String separator){ + StringBuilder builder = new StringBuilder(); + for(int i = 0; i < events.length; i ++){ + builder.append(events[i].toJson()); + if (i < events.length - 1){ + builder.append(separator); + } + } + return builder.toString(); + } + +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/json/JsonTool.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/json/JsonTool.java new file mode 100644 index 000000000..0822e820d --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/json/JsonTool.java @@ -0,0 +1,63 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.json; + +import java.util.Locale; + +public class JsonTool { + static final char[] HEX_DIGITS = new char[] {'0','1','2','3','4','5','6','7','8','9','A','B','C','D','E','F'}; + + /** + * Avoid the special char + * @param input input string + * @return output string + */ + public static String escapeStrValue(String input){ + char[] chars = input.toCharArray(); + StringBuilder sb = new StringBuilder(); + for (char c : chars) { + switch (c) { + case '\"': + sb.append("\\\""); + break; + case '\\': + sb.append("\\\\"); + break; + case '/': + sb.append("\\/"); + break; + case '\b': + sb.append("\\b"); + break; + case '\f': + sb.append("\\f"); + break; + case '\n': + sb.append("\\n"); + break; + case '\r': + sb.append("\\r"); + break; + case '\t': + sb.append("\\t"); + break; + default: + sb.append((c < 32) ? 
escapeUnicode(c) : c); + } + } + return sb.toString(); + } + + /** + * Escape unicode + * @param code char code + * @return escaped string + */ + private static String escapeUnicode(int code){ + if (code > 0xffff){ + return "\\u" + Integer.toHexString(code).toUpperCase(Locale.ENGLISH); + } else { + return "\\u" + HEX_DIGITS[(code >> 12) & 15] + + HEX_DIGITS[(code >> 8) & 15] + HEX_DIGITS[(code >> 4) & 15] + HEX_DIGITS[code & 15]; + } + } + +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/pom.xml b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/pom.xml new file mode 100644 index 000000000..165f6a520 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/pom.xml @@ -0,0 +1,31 @@ + + + + streamis-job-log + com.webank.wedatasphere.streamis + 0.2.0 + ../pom.xml + + 4.0.0 + + streamis-job-log-server + + + 8 + 8 + + + + + com.webank.wedatasphere.streamis + streamis-job-log-common + 0.2.0 + + + org.apache.linkis + linkis-module + + + \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/StreamisJobLogAutoConfiguration.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/StreamisJobLogAutoConfiguration.java new file mode 100644 index 000000000..7b839893a --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/StreamisJobLogAutoConfiguration.java @@ -0,0 +1,4 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.server; + +public class StreamisJobLogAutoConfiguration { +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/config/StreamJobLogConfig.java 
b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/config/StreamJobLogConfig.java new file mode 100644 index 000000000..c857d5249 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/config/StreamJobLogConfig.java @@ -0,0 +1,46 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.server.config; + +import org.apache.linkis.common.conf.CommonVars; +import org.apache.linkis.common.conf.TimeType; + +/** + * Store the configuration defined for job log + */ +public class StreamJobLogConfig { + + /** + * Bucket monitor name + */ + public static final CommonVars BUCKET_MONITOR_NAME = CommonVars.apply("wds.stream.job.log.storage.bucket.monitor.name", "Log-Storage-Bucket-Monitor"); + + /** + * Bucket monitor interval + */ + public static final CommonVars BUCKET_MONITOR_INTERVAL = CommonVars.apply("wds.stream.job.log.storage.bucket.monitor.interval", new TimeType("2m")); + + /** + * Bucket max idle time + */ + public static final CommonVars BUCKET_MAX_IDLE_TIME = CommonVars.apply("wds.stream.job.log.storage.bucket.max-idle-time", new TimeType("12h")); + + /** + * Bucket root path + */ + public static final CommonVars BUCKET_ROOT_PATH = CommonVars.apply("wds.stream.job.log.storage.bucket.root-path", "/data/stream/log"); + /** + * Max active part size in bucket + */ + public static final CommonVars BUCKET_MAX_ACTIVE_PART_SIZE = CommonVars.apply("wds.stream.job.log.storage.bucket.max-active-part-size", 100L); + + /** + * Compression of part in bucket + */ + public static final CommonVars BUCKET_PART_COMPRESS = CommonVars.apply("wds.stream.job.log.storage.bucket.part-compress", "gz"); + + /** + * Bucket layout + */ + public static final CommonVars BUCKET_LAYOUT = CommonVars.apply("wds.stream.job.log.storage.bucket.layout", "%msg"); + + public static final CommonVars BUCKET_PART_HOLD_DAY = 
CommonVars.apply("wds.stream.job.log.storage.bucket.part-hold-day", 30); +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/entities/StreamisLogEvents.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/entities/StreamisLogEvents.java new file mode 100644 index 000000000..8676c5778 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/entities/StreamisLogEvents.java @@ -0,0 +1,12 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.server.entities; + +import com.fasterxml.jackson.annotation.JsonIgnore; + +public class StreamisLogEvents extends com.webank.wedatasphere.streamis.jobmanager.log.entities.StreamisLogEvents { + + @Override + @JsonIgnore + public String[] getContents() { + return super.getContents(); + } +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/exception/StreamJobLogException.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/exception/StreamJobLogException.java new file mode 100644 index 000000000..56edc2dd3 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/exception/StreamJobLogException.java @@ -0,0 +1,29 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.server.exception; + +import org.apache.linkis.common.exception.ErrorException; +import org.apache.linkis.common.exception.ExceptionLevel; +import org.apache.linkis.common.exception.LinkisRuntimeException; + +/** + * Stream job log exception + */ +public class StreamJobLogException extends ErrorException { + public 
StreamJobLogException(int errCode, String desc) { + super(errCode, desc); + } + public StreamJobLogException(int errCode, String desc, Throwable t){ + super(errCode, desc); + + } + public static class Runtime extends LinkisRuntimeException{ + + public Runtime(int errCode, String desc) { + super(errCode, desc); + } + + @Override + public ExceptionLevel getLevel() { + return ExceptionLevel.ERROR; + } + } +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/restful/JobLogRestfulApi.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/restful/JobLogRestfulApi.java new file mode 100644 index 000000000..e9d19c651 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/restful/JobLogRestfulApi.java @@ -0,0 +1,52 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.server.restful; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.webank.wedatasphere.streamis.jobmanager.log.entities.StreamisLogEvent; +import com.webank.wedatasphere.streamis.jobmanager.log.server.entities.StreamisLogEvents; +import com.webank.wedatasphere.streamis.jobmanager.log.server.exception.StreamJobLogException; +import com.webank.wedatasphere.streamis.jobmanager.log.server.service.StreamisJobLogService; +import org.apache.commons.lang.StringUtils; +import org.apache.linkis.common.utils.JsonUtils; +import org.apache.linkis.server.Message; +import org.apache.linkis.server.security.SecurityFilter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RestController; + 
+import javax.annotation.Resource; +import javax.servlet.http.HttpServletRequest; +import java.io.IOException; + +@RestController +@RequestMapping(path = "/streamis/streamJobManager/log") +public class JobLogRestfulApi { + + private static final Logger LOG = LoggerFactory.getLogger(JobLogRestfulApi.class); + + @Resource + private StreamisJobLogService streamisJobLogService; + + @RequestMapping(value = "/collect/events", method = RequestMethod.POST) + public Message collectEvents(@RequestBody StreamisLogEvents events, HttpServletRequest request){ + Message result; + try{ + if (StringUtils.isBlank(events.getAppName())){ + return Message.ok("Ignore the stream log events without application name"); + } + String userName = SecurityFilter.getLoginUsername(request); + if (StringUtils.isBlank(userName)){ + throw new StreamJobLogException(-1, "The request should has token user"); + } + this.streamisJobLogService.store(userName, events); + result = Message.ok(); + }catch (Exception e){ + String message = "Fail to collect stream log events, message: " + e.getMessage(); + result = Message.error(message); + } + return result; + } + +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/service/DefaultStreamisJobLogService.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/service/DefaultStreamisJobLogService.java new file mode 100644 index 000000000..8fea4dab6 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/service/DefaultStreamisJobLogService.java @@ -0,0 +1,35 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.server.service; + +import com.webank.wedatasphere.streamis.jobmanager.log.entities.StreamisLogEvents; +import 
com.webank.wedatasphere.streamis.jobmanager.log.server.storage.JobLogStorage; +import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucket; +import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucketConfig; +import org.springframework.stereotype.Service; + +import javax.annotation.PostConstruct; +import javax.annotation.Resource; + +/** + * Default implement + */ +@Service +public class DefaultStreamisJobLogService implements StreamisJobLogService{ + + @Resource + private JobLogStorage jobLogStorage; + + private JobLogBucketConfig jobLogBucketConfig; + + @PostConstruct + public void init(){ + jobLogBucketConfig = new JobLogBucketConfig(); + } + @Override + public void store(String user, StreamisLogEvents events) { + JobLogBucket jobLogBucket = jobLogStorage.getOrCreateBucket(user, events.getAppName(), jobLogBucketConfig); + // If cannot get log bucket, drop the events + if (null != jobLogBucket){ + jobLogBucket.getBucketStorageWriter().write(events); + } + } +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/service/StreamisJobLogService.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/service/StreamisJobLogService.java new file mode 100644 index 000000000..e8f8bfe4e --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/service/StreamisJobLogService.java @@ -0,0 +1,16 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.server.service; + +import com.webank.wedatasphere.streamis.jobmanager.log.entities.StreamisLogEvents; + +/** + * Job log service + */ +public interface StreamisJobLogService { + + /** + * Store log events + * @param user user own + * @param events events + */ + void store(String user, StreamisLogEvents 
events); +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/JobLogStorage.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/JobLogStorage.java new file mode 100644 index 000000000..e70590338 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/JobLogStorage.java @@ -0,0 +1,29 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.server.storage; + +import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucket; +import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucketConfig; + +/** + * Storage of job log + */ +public interface JobLogStorage { + + /** + * Get or create a bucket + * @param userName owner (user) of the bucket + * @param appName application name + * @param bucketConfig bucket config + * @return the existing or newly created bucket + */ + JobLogBucket getOrCreateBucket(String userName, String appName, JobLogBucketConfig bucketConfig); + + /** + * Init method + */ + void init(); + + /** + * Destroy method + */ + void destroy(); +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/StreamisJobLogStorage.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/StreamisJobLogStorage.java new file mode 100644 index 000000000..154c8f6f1 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/StreamisJobLogStorage.java @@ -0,0 +1,136 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.server.storage; + +import
com.webank.wedatasphere.streamis.jobmanager.log.server.config.StreamJobLogConfig; +import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucket; +import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucketConfig; +import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucketState; +import org.apache.linkis.common.utils.Utils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Component; + +import javax.annotation.PostConstruct; +import javax.annotation.PreDestroy; +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; +import java.text.SimpleDateFormat; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; + +import static com.webank.wedatasphere.streamis.jobmanager.log.server.config.StreamJobLogConfig.BUCKET_MONITOR_INTERVAL; + +/** + * Job log storage + */ +@Component +public class StreamisJobLogStorage implements JobLogStorage{ + + private static final Logger LOG = LoggerFactory.getLogger(StreamisJobLogStorage.class); + /** + * Buckets + */ + private final Map buckets = new ConcurrentHashMap<>(); + + /** + * Constructor cache + */ + private final Map> bucketConstructors = new ConcurrentHashMap<>(); + /** + * To monitor the status of buckets + */ + private Future monitorThread; + + @Override + public JobLogBucket getOrCreateBucket(String userName, String appName, JobLogBucketConfig bucketConfig) { + String bucketName = toBucketName(userName, appName); + return buckets.computeIfAbsent(bucketName, name -> { + Class bucketClass = bucketConfig.getBucketClass(); + if (Objects.nonNull(bucketClass)) { + Constructor constructor = bucketConstructors.computeIfAbsent(bucketClass.getName(), className -> { + Constructor[] constructors = bucketClass.getConstructors(); + Constructor 
matchConstructor = null; + for (Constructor constructor1 : constructors) { + Class[] inputParams = constructor1.getParameterTypes(); + if (inputParams.length >= 2 && inputParams[0].equals(String.class) + && inputParams[1].equals(JobLogBucketConfig.class)) { + matchConstructor = constructor1; + break; + } + } + return matchConstructor; + }); + if (Objects.nonNull(constructor)) { + try { + return (JobLogBucket) constructor.newInstance(bucketName, bucketConfig); + } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) { + LOG.warn("Cannot create storage log bucket from [{}]", bucketClass.getName(), e); + } + } + } + return null; + }); + } + + @Override + @PostConstruct + public synchronized void init() { + if (Objects.isNull(monitorThread)){ + monitorThread = Utils.defaultScheduler().scheduleAtFixedRate(() -> { + String threadName = Thread.currentThread().getName(); + try { + Thread.currentThread().setName(StreamJobLogConfig.BUCKET_MONITOR_NAME.getValue()); + long maxIdleTime = StreamJobLogConfig.BUCKET_MAX_IDLE_TIME.getValue().toLong(); + SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); + if (buckets.size() > 0) { + StringBuilder builder = new StringBuilder("Buckets in LogStorage: [\n"); + buckets.forEach((bucketName, bucket) -> { + JobLogBucketState bucketState = bucket.getBucketState(); + builder.append("bucket: [ name: ") + .append(bucketName) + .append(", path: ").append(bucketState.getBucketPath()) + .append(", parts: ").append(bucketState.getBucketParts()) + .append(", write-rate: ").append(bucketState.getBucketWriteRate()).append("/s") + .append(", last-write-time: ").append(dateFormat.format(bucketState.getBucketWriteTime())) + .append(" ]\n"); + if (bucketState.getBucketWriteTime() + maxIdleTime <= System.currentTimeMillis()) { + LOG.info("Close the idle bucket: [ name: {}, last-write-time: {} ]", + bucketName, dateFormat.format(bucketState.getBucketWriteTime())); + bucket.close(); + // Delete the 
bucket + buckets.remove(bucketName); + } + + }); + LOG.info(builder.toString()); + } + } finally { + Thread.currentThread().setName(threadName); + } + + },BUCKET_MONITOR_INTERVAL.getValue().toLong(), BUCKET_MONITOR_INTERVAL.getValue().toLong(), TimeUnit.MILLISECONDS); + } + } + + /** + * Bucket name + * @param userName username + * @param appName app name + * @return bucket name + */ + private String toBucketName(String userName, String appName){ + return userName + "." + appName; + } + + @Override + @PreDestroy + public void destroy() { + // First, close all the buckets + buckets.forEach((bucketName, bucket) -> bucket.close()); + if (null != monitorThread){ + monitorThread.cancel(true); + } + } +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucket.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucket.java new file mode 100644 index 000000000..b55ce818b --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucket.java @@ -0,0 +1,29 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket; + +/** + * Job log bucket for streamis + */ +public interface JobLogBucket { + + /** + * Bucket state + * @return state + */ + JobLogBucketState getBucketState(); + + /** + * Storage writer + * @return storage writer + */ + JobLogStorageWriter getBucketStorageWriter(); + + /** + * Bucket name + * @return bucket name + */ + String getBucketName(); + /** + * Close the bucket + */ + void close(); +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketConfig.java
b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketConfig.java new file mode 100644 index 000000000..831cccc65 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketConfig.java @@ -0,0 +1,125 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket; + +import com.webank.wedatasphere.streamis.jobmanager.log.server.config.StreamJobLogConfig; +import com.webank.wedatasphere.streamis.jobmanager.log.server.exception.StreamJobLogException; +import org.apache.linkis.common.conf.CommonVars; + +import java.util.HashMap; +import java.util.Map; + +/** + * Configuration for job log bucket + */ +public class JobLogBucketConfig { + + @SuppressWarnings("unchecked") + public JobLogBucketConfig(){ + try { + Class defaultBucketClass = Class.forName(Define.JOB_LOG_BUCKET_CLASS.getValue()); + if (JobLogBucket.class.isAssignableFrom(defaultBucketClass)){ + this.bucketClass = (Class) defaultBucketClass; + } + } catch (ClassNotFoundException e) { + throw new StreamJobLogException.Runtime(-1, "Cannot find the bucket class, message: " + e.getMessage()); + } + } + + /** + * Bucket class + */ + private Class bucketClass; + + /** + * Root path for bucket + */ + private String bucketRootPath = StreamJobLogConfig.BUCKET_ROOT_PATH.getValue(); + + /** + * Attribute + */ + protected Map attributes = new HashMap<>(); + + /** + * Max size of bucket active part (MB) + */ + private long maxBucketActivePartSize = StreamJobLogConfig.BUCKET_MAX_ACTIVE_PART_SIZE.getValue(); + + /** + * The compress format used for bucket parts + */ + private String bucketPartCompress = StreamJobLogConfig.BUCKET_PART_COMPRESS.getValue(); + + /** + * Max hold time in days for bucket part + */ + private int bucketPartHoldTimeInDay = 
StreamJobLogConfig.BUCKET_PART_HOLD_DAY.getValue(); + + /** + * Layout pattern + */ + private String LogLayOutPattern = StreamJobLogConfig.BUCKET_LAYOUT.getValue(); + + public Class getBucketClass() { + return bucketClass; + } + + public void setBucketClass(Class bucketClass) { + this.bucketClass = bucketClass; + } + + public String getBucketRootPath() { + return bucketRootPath; + } + + public void setBucketRootPath(String bucketRootPath) { + this.bucketRootPath = bucketRootPath; + } + + public Map getAttributes() { + return attributes; + } + + public void setAttributes(Map attributes) { + this.attributes = attributes; + } + + public long getMaxBucketActivePartSize() { + return maxBucketActivePartSize; + } + + public void setMaxBucketActivePartSize(long maxBucketActivePartSize) { + this.maxBucketActivePartSize = maxBucketActivePartSize; + } + + public String getBucketPartCompress() { + return bucketPartCompress; + } + + public void setBucketPartCompress(String bucketPartCompress) { + this.bucketPartCompress = bucketPartCompress; + } + + public int getBucketPartHoldTimeInDay() { + return bucketPartHoldTimeInDay; + } + + public void setBucketPartHoldTimeInDay(int bucketPartHoldTimeInDay) { + this.bucketPartHoldTimeInDay = bucketPartHoldTimeInDay; + } + + public String getLogLayOutPattern() { + return LogLayOutPattern; + } + + public void setLogLayOutPattern(String logLayOutPattern) { + LogLayOutPattern = logLayOutPattern; + } + + + public static final class Define{ + /** + * Default bucket class + */ + public static final CommonVars JOB_LOG_BUCKET_CLASS = CommonVars.apply("wds.streamis.job.log.bucket.class", "com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.Log4j2JobLogBucket"); + } +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketFactory.java 
b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketFactory.java new file mode 100644 index 000000000..d4b9b6b2a --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketFactory.java @@ -0,0 +1,15 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket; + +/** + * Factory for creating job log buckets + */ +public interface JobLogBucketFactory { + + /** + * Create bucket + * @param jobName job name + * @param config bucket config + * @return the created bucket + */ + JobLogBucket createBucket(String jobName, JobLogBucketConfig config); +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketState.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketState.java new file mode 100644 index 000000000..8051e6d13 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketState.java @@ -0,0 +1,31 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket; + +/** + * State of log bucket + */ +public interface JobLogBucketState { + + /** + * Bucket path + * @return path + */ + String getBucketPath(); + + /** + * Write rate + * @return rate + */ + double getBucketWriteRate(); + + /** + * Bucket parts + * @return number + */ + int getBucketParts(); + + /** + * Last write time + * @return time + */ + long getBucketWriteTime(); +} diff --git
a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogStorageWriter.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogStorageWriter.java new file mode 100644 index 000000000..772040374 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogStorageWriter.java @@ -0,0 +1,27 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket; + +import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement; + +/** + * Storage writer for job log + */ +public interface JobLogStorageWriter { + + /** + * Write log element + * @param logEl elements + * @param + */ + void write(LogElement logEl); + + /** + * Write log line + * @param logLine log line + */ + void write(String logLine); + + /** + * Close log storage + */ + void close(); +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/Log4j2JobLogBucket.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/Log4j2JobLogBucket.java new file mode 100644 index 000000000..0915e0fa0 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/Log4j2JobLogBucket.java @@ -0,0 +1,278 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket; + +import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement; +import org.apache.commons.io.FilenameUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.linkis.common.conf.CommonVars; 
+import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.core.Appender; +import org.apache.logging.log4j.core.LoggerContext; +import org.apache.logging.log4j.core.appender.RollingFileAppender; +import org.apache.logging.log4j.core.appender.rolling.*; +import org.apache.logging.log4j.core.appender.rolling.action.*; +import org.apache.logging.log4j.core.config.AppenderRef; +import org.apache.logging.log4j.core.config.Configuration; +import org.apache.logging.log4j.core.config.LoggerConfig; +import org.apache.logging.log4j.core.layout.PatternLayout; +import org.checkerframework.checker.units.qual.A; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.util.Optional; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicLong; + +/** + * Job log bucket for log4j + */ +public class Log4j2JobLogBucket implements JobLogBucket{ + + private static final org.slf4j.Logger LOG = LoggerFactory.getLogger(Log4j2JobLogBucket.class); + + private static final String DEFAULT_FILE_PATTERN_SUFFIX = ".%d{yyyy-MM-dd}-%i"; + + private static final CommonVars ROLLOVER_MAX = CommonVars.apply("wds.stream.job.log.storage.bucket.log4j.rollover-max", 20); + /** + * Bucket name + */ + private final String bucketName; + + /** + * Logger context + */ + private final LoggerContext loggerContext; + + /** + * Logger entity + */ + private final Logger logger; + + /** + * Storage writer + */ + private final JobLogStorageWriter jobLogStorageWriter; + + /** + * Bucket state + */ + private final JobLogBucketState jobLogBucketState; + + /** + * Last write time; + */ + private long lastWriteTime; + + /** + * Prev Interval time + */ + private long preIntervalTime; + + /** + * Interval counter + */ + private final AtomicLong intervalCounter = new AtomicLong(0); + + /** + * Store the write rate + */ + private double writeRate; + public 
Log4j2JobLogBucket(String bucketName, JobLogBucketConfig config){ + this.bucketName = bucketName; + // Create logger context + this.loggerContext = (LoggerContext) LogManager.getContext(false); + this.logger = initLogger(this.bucketName, config, this.loggerContext); + this.jobLogStorageWriter = createStorageWriter(); + this.jobLogBucketState = createBucketState(); + } + @Override + public JobLogBucketState getBucketState() { + return this.jobLogBucketState; + } + + @Override + public JobLogStorageWriter getBucketStorageWriter() { + return this.jobLogStorageWriter; + } + + @Override + public String getBucketName() { + return this.bucketName; + } + + @Override + public void close() { + Configuration log4jConfig = this.loggerContext.getConfiguration(); + // First to stop appender + log4jConfig.getAppender(this.bucketName).stop(); + log4jConfig.getLoggerConfig(this.bucketName).removeAppender(this.bucketName); + log4jConfig.removeLogger(this.bucketName); + loggerContext.updateLoggers(); + } + + private synchronized Logger initLogger(String bucketName, JobLogBucketConfig config, LoggerContext loggerContext){ + Configuration log4jConfig = loggerContext.getConfiguration(); + String fileName = resolveFileName(config.getBucketRootPath(), bucketName); + RollingFileAppender appender = RollingFileAppender.newBuilder() + .setLayout(PatternLayout.newBuilder().withPattern(config.getLogLayOutPattern()).build()) + .setName(bucketName) +// .withFileOwner() + .withFileName(fileName) + .withFilePattern(resolveFilePattern(fileName, config.getBucketPartCompress())) + .withPolicy(SizeBasedTriggeringPolicy.createPolicy(config.getMaxBucketActivePartSize() + "MB")) + .withStrategy(createRolloverStrategy(log4jConfig, fileName, ROLLOVER_MAX.getValue(), config.getBucketPartHoldTimeInDay())) + .setConfiguration(log4jConfig) + .build(); + appender.start(); + log4jConfig.addAppender(appender); + LoggerConfig loggerConfig = LoggerConfig.newBuilder().withAdditivity(false).withLevel(Level.ALL) + 
.withRefs(new AppenderRef[]{ + AppenderRef.createAppenderRef(bucketName, null, null) + }) + .withLoggerName(bucketName).withConfig(log4jConfig).build(); + loggerConfig.addAppender(appender, null, null); + log4jConfig.addLogger(bucketName, loggerConfig); + // Should we update the logger context ? + loggerContext.updateLoggers(); + return loggerContext.getLogger(bucketName); + } + + /** + * Create storage writer + * @return storage writer + */ + private JobLogStorageWriter createStorageWriter(){ + return new JobLogStorageWriter() { + @Override + public void write(LogElement logEl) { + String[] contents = logEl.getContents(); + if (null != contents){ + for(String content : contents){ + write(content); + } + } + } + + @Override + public void write(String logLine) { + logger.info(logLine); + long currentTime = System.currentTimeMillis(); + long intervalCnt = intervalCounter.getAndIncrement(); + long intervalTime = (currentTime - preIntervalTime)/1000; + // Per minute accumulate the rate + if ( intervalTime >= 60){ + writeRate = (double)intervalCnt / (double)intervalTime; + preIntervalTime = currentTime; + intervalCounter.set(0); + } + lastWriteTime = currentTime; + + } + + @Override + public void close() { + // Ignore + } + }; + } + + /** + * Create bucket state + * @return bucket state + */ + private JobLogBucketState createBucketState(){ + return new JobLogBucketState() { + private String bucketPath; + @Override + public String getBucketPath() { + if (StringUtils.isBlank(bucketPath)) { + Appender appender = loggerContext.getConfiguration().getAppender(bucketName); + if (appender instanceof RollingFileAppender) { + bucketPath = new File(((RollingFileAppender) appender).getFileName()).getParent(); + } + } + return this.bucketPath; + } + + @Override + public double getBucketWriteRate() { + return writeRate; + } + + @Override + public int getBucketParts() { + AtomicInteger parts = new AtomicInteger(-1); + String bucketPath = getBucketPath(); + if 
(StringUtils.isNotBlank(bucketPath)){ + Optional.ofNullable(new File(bucketPath).list()).ifPresent(list -> parts.set(list.length)); + } + return parts.get(); + } + + @Override + public long getBucketWriteTime() { + return lastWriteTime; + } + }; + } + /** + * Create rollover strategy + * @param configuration configuration + * @param fileName file name + * @param rolloverMax rollover max in file pattern + * @param fileHoldDay file hold day time + * @return strategy + */ + private RolloverStrategy createRolloverStrategy(Configuration configuration, + String fileName, int rolloverMax, int fileHoldDay){ + DefaultRolloverStrategy.Builder builder = DefaultRolloverStrategy.newBuilder(); + if (rolloverMax > 0){ + builder.withMax(rolloverMax + ""); + } + if (fileHoldDay > 0){ + // Create the actions to delete old file + builder.withCustomActions(new Action[]{ + DeleteAction.createDeleteAction(new File(fileName).getParent(), false, 2, false, null, + new PathCondition[]{ + IfFileName.createNameCondition(null, ".*"), + IfLastModified.createAgeCondition(Duration.parse(fileHoldDay + "d")) + }, + null, configuration) + } + ); + } + return builder.build(); + } + /** + * Ex: /data/stream/log/hadoop/{projectName}/{jobName}/{projectName}.{jobName}.log + * @param bucketRootPath bucket root path + * @param bucketName bucket name + * @return file name with absolute path + */ + private String resolveFileName(String bucketRootPath, String bucketName){ + // {projectName}.{jobName} + String fileName = FilenameUtils.normalize(bucketName); + String basePath = bucketRootPath; + if (!basePath.endsWith("/")){ + basePath += "/"; + } + basePath += fileName.replace(".", "/"); + return basePath + "/" + fileName.substring(bucketName.indexOf(".") + 1) + ".log"; + } + + /** + * Resolve file pattern + * @param fileName file name + * @param format format + * @return file pattern + */ + private String resolveFilePattern(String fileName, String format){ + String filePattern = fileName +
Log4j2JobLogBucket.DEFAULT_FILE_PATTERN_SUFFIX; + if (StringUtils.isNotBlank(format)){ + filePattern = filePattern + (format.startsWith(".") ? format : "." +format); + } + return filePattern; + } + +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/StreamisJobLogBucket.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/StreamisJobLogBucket.java new file mode 100644 index 000000000..ba9c002d6 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/StreamisJobLogBucket.java @@ -0,0 +1,7 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket; + +/** + * Use the appender and strategy of log4j (version 1.x) to implement the bucket + */ +public class StreamisJobLogBucket { +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/test/com/webank/wedatasphere/streamis/jobmanager/log/LogStorageTest.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/test/com/webank/wedatasphere/streamis/jobmanager/log/LogStorageTest.java new file mode 100644 index 000000000..6ced214f2 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/test/com/webank/wedatasphere/streamis/jobmanager/log/LogStorageTest.java @@ -0,0 +1,4 @@ +package com.webank.wedatasphere.streamis.jobmanager.log; + +public class LogStorageTest { +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/StreamJobMapper.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/StreamJobMapper.java index 
c380ca954..3e002c45d 100644 --- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/StreamJobMapper.java +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/StreamJobMapper.java @@ -30,6 +30,7 @@ List getJobLists(@Param("projectName") String projectName, @Para StreamJob getJobById(@Param("jobId") Long jobId); + List getJobByName(@Param("jobName") String jobName); List getJobVersions(@Param("jobId") Long jobId); diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/impl/StreamJobMapper.xml b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/impl/StreamJobMapper.xml index 71e2b38a6..875da3e4f 100644 --- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/impl/StreamJobMapper.xml +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/impl/StreamJobMapper.xml @@ -86,6 +86,11 @@ + + diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/conf/JobConf.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/conf/JobConf.scala index 89cab6f92..41b91e348 100644 --- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/conf/JobConf.scala +++ 
b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/conf/JobConf.scala @@ -15,7 +15,7 @@ package com.webank.wedatasphere.streamis.jobmanager.manager.conf -import org.apache.linkis.common.conf.{CommonVars, TimeType} +import org.apache.linkis.common.conf.{CommonVars, Configuration, TimeType} import com.webank.wedatasphere.streamis.jobmanager.manager.exception.JobExecuteErrorException @@ -27,6 +27,18 @@ object JobConf { val STREAMIS_JOB_MONITOR_ENABLE: CommonVars[Boolean] = CommonVars("wds.streamis.job.monitor.enable", true) + val STREAMIS_JOB_PARAM_BLANK_PLACEHOLDER: CommonVars[String] = CommonVars("wds.streamis.job.param.blank.placeholder", "\u0001") + + /** + * Gateway for stream job log module + */ + val STREAMIS_JOB_LOG_GATEWAY: CommonVars[String] = CommonVars("wds.streamis.job.log.gateway", Configuration.getGateWayURL()) + + /** + * Path for collecting stream job log + */ + val STREAMIS_JOB_LOG_COLLECT_PATH: CommonVars[String] = CommonVars("wds.streamis.job.log.collect.path", "/api/rest_j/v1/streamis/streamJobManager/log/collect/events") + val FLINK_JOB_STATUS_NOT_STARTED: CommonVars[Int] = CommonVars("wds.streamis.job.status.not-started", 0,"Not Started") val FLINK_JOB_STATUS_COMPLETED: CommonVars[Int] = CommonVars("wds.streamis.job.status.completed", 1,"Completed") @@ -82,4 +94,7 @@ object JobConf { val TASK_SUBMIT_TIME_MAX: CommonVars[TimeType] = CommonVars("wds.streamis.task.submit.time.max", new TimeType("5m")) + val SUPPORTED_JOB_TYPES: CommonVars[String] = CommonVars("wds.streamis.supported.job.types", "flink.jar,flink.sql,spark.jar") + + val SUPPORTED_MANAGEMENT_JOB_TYPES: CommonVars[String] = CommonVars("wds.streamis.management.supported.job.types", "flink.jar,flink.sql") } diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/DefaultStreamJobService.scala 
b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/DefaultStreamJobService.scala index c3f467118..e0c270d99 100755 --- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/DefaultStreamJobService.scala +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/DefaultStreamJobService.scala @@ -17,26 +17,26 @@ package com.webank.wedatasphere.streamis.jobmanager.manager.service import java.util import java.util.Date + import com.github.pagehelper.PageInfo import com.webank.wedatasphere.streamis.jobmanager.launcher.conf.JobConfKeyConstants import com.webank.wedatasphere.streamis.jobmanager.launcher.service.StreamJobConfService import com.webank.wedatasphere.streamis.jobmanager.manager.alert.AlertLevel -import org.apache.linkis.common.exception.ErrorException -import org.apache.linkis.common.utils.Logging import com.webank.wedatasphere.streamis.jobmanager.manager.conf.JobConf import com.webank.wedatasphere.streamis.jobmanager.manager.dao.{StreamAlertMapper, StreamJobMapper, StreamTaskMapper} +import com.webank.wedatasphere.streamis.jobmanager.manager.entity._ import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.{QueryJobListVo, TaskCoreNumVo, VersionDetailVo} -import com.webank.wedatasphere.streamis.jobmanager.manager.entity.{MetaJsonInfo, StreamAlertRecord, StreamJob, StreamJobVersion, StreamJobVersionFiles} import com.webank.wedatasphere.streamis.jobmanager.manager.exception.{JobCreateErrorException, JobFetchErrorException} import com.webank.wedatasphere.streamis.jobmanager.manager.transform.JobContentParser import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.StreamisTransformJobContent import 
com.webank.wedatasphere.streamis.jobmanager.manager.util.{ReaderUtils, ZipHelper} import org.apache.commons.lang.StringUtils +import org.apache.linkis.common.exception.ErrorException +import org.apache.linkis.common.utils.Logging import org.springframework.beans.factory.annotation.Autowired import org.springframework.stereotype.Service import org.springframework.transaction.annotation.Transactional -import javax.annotation.Resource import scala.collection.JavaConverters._ @@ -60,6 +60,8 @@ class DefaultStreamJobService extends StreamJobService with Logging { this.streamJobMapper.getJobById(jobId) } + override def getJobByName(jobName: String): util.List[StreamJob] = streamJobMapper.getJobByName(jobName) + override def getByProList(projectName: String, userName: String, jobName: String, jobStatus: Integer, jobCreator: String): PageInfo[QueryJobListVo] = { val streamJobList = streamJobMapper.getJobLists(projectName, userName, jobName, jobStatus, jobCreator) if (streamJobList != null && !streamJobList.isEmpty) { @@ -126,6 +128,9 @@ class DefaultStreamJobService extends StreamJobService with Logging { override def createStreamJob(metaJsonInfo: MetaJsonInfo, userName: String): StreamJobVersion = { if(StringUtils.isBlank(metaJsonInfo.getJobType)) throw new JobCreateErrorException(30030, s"jobType is needed.") + else if(!JobConf.SUPPORTED_JOB_TYPES.getValue.contains(metaJsonInfo.getJobType)) { + throw new JobCreateErrorException(30030, s"jobType ${metaJsonInfo.getJobType} is not supported.") + } if(metaJsonInfo.getJobContent == null || metaJsonInfo.getJobContent.isEmpty) throw new JobCreateErrorException(30030, s"jobContent is needed.") val job = streamJobMapper.getCurrentJob(metaJsonInfo.getProjectName, metaJsonInfo.getJobName) diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/DefaultStreamTaskService.scala 
b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/DefaultStreamTaskService.scala index 1913fc0c9..d03b0b9e0 100644 --- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/DefaultStreamTaskService.scala +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/DefaultStreamTaskService.scala @@ -15,6 +15,10 @@ package com.webank.wedatasphere.streamis.jobmanager.manager.service +import java.util +import java.util.concurrent.Future +import java.util.{Calendar, function} + import com.webank.wedatasphere.streamis.jobmanager.launcher.conf.JobConfKeyConstants import com.webank.wedatasphere.streamis.jobmanager.launcher.dao.StreamJobConfMapper import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobLaunchManager @@ -22,22 +26,23 @@ import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobState import com.webank.wedatasphere.streamis.jobmanager.launcher.job.{JobInfo, LaunchJob} import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.entity.LogRequestPayload import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.{Checkpoint, Savepoint} -import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.{FlinkJobClient, FlinkJobInfo} +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.{FlinkJobClient, FlinkJobInfo, LinkisJobInfo} import com.webank.wedatasphere.streamis.jobmanager.manager.SpringContextHolder import com.webank.wedatasphere.streamis.jobmanager.manager.conf.JobConf import com.webank.wedatasphere.streamis.jobmanager.manager.conf.JobConf.FLINK_JOB_STATUS_FAILED import com.webank.wedatasphere.streamis.jobmanager.manager.dao.{StreamJobMapper, StreamTaskMapper} -import 
com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamTask -import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.{ExecResultVo, JobProgressVo, JobStatusVo, PauseResultVo, ScheduleResultVo, StreamTaskListVo} -import com.webank.wedatasphere.streamis.jobmanager.manager.exception.{JobErrorException, JobExecuteErrorException, JobFetchErrorException, JobPauseErrorException, JobTaskErrorException} +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo._ +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.{StreamJob, StreamTask} +import com.webank.wedatasphere.streamis.jobmanager.manager.exception.{JobExecuteErrorException, JobFetchErrorException, JobPauseErrorException, JobTaskErrorException} import com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.FutureScheduler import com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.events.AbstractStreamisSchedulerEvent.StreamisEventInfo -import com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.events.{AbstractStreamisSchedulerEvent, StreamisPhaseInSchedulerEvent} +import com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.events.StreamisPhaseInSchedulerEvent import com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.events.StreamisPhaseInSchedulerEvent.ScheduleCommand import com.webank.wedatasphere.streamis.jobmanager.manager.transform.exception.TransformFailedErrorException -import com.webank.wedatasphere.streamis.jobmanager.manager.transform.{StreamisTransformJobBuilder, Transform} +import com.webank.wedatasphere.streamis.jobmanager.manager.transform.{StreamisTransformJobBuilder, TaskMetricsParser, Transform} import com.webank.wedatasphere.streamis.jobmanager.manager.util.DateUtils import com.webank.wedatasphere.streamis.jobmanager.manager.utils.StreamTaskUtils +import javax.annotation.Resource import org.apache.commons.lang.StringUtils import org.apache.linkis.common.utils.{Logging, Utils} import 
org.apache.linkis.httpclient.dws.DWSHttpClient @@ -47,10 +52,6 @@ import org.springframework.beans.factory.annotation.Autowired import org.springframework.stereotype.Service import org.springframework.transaction.annotation.Transactional -import java.util -import java.util.{Calendar, Date, function} -import java.util.concurrent.Future -import javax.annotation.Resource import scala.collection.JavaConverters._ @@ -60,6 +61,7 @@ class DefaultStreamTaskService extends StreamTaskService with Logging{ @Autowired private var streamTaskMapper:StreamTaskMapper=_ @Autowired private var streamJobMapper:StreamJobMapper=_ @Autowired private var streamisTransformJobBuilders: Array[StreamisTransformJobBuilder] = _ + @Autowired private var taskMetricsParser: Array[TaskMetricsParser] = _ @Resource private var jobLaunchManager: JobLaunchManager[_ <: JobInfo] = _ @@ -398,11 +400,24 @@ class DefaultStreamTaskService extends StreamTaskService with Logging{ val jobClient = jobLaunchManager.connect(streamTask.getLinkisJobId, streamTask.getLinkisJobInfo) jobClient match { case client: FlinkJobClient => + requestPayload.setLogHistory(JobConf.isCompleted(streamTask.getStatus)) val logIterator = client.fetchLogs(requestPayload) returnMap.put("logPath", logIterator.getLogPath) returnMap.put("logs", logIterator.getLogs) returnMap.put("endLine", logIterator.getEndLine) logIterator.close() + jobClient.getJobInfo match { + case linkisInfo: LinkisJobInfo => + if (StringUtils.isBlank(linkisInfo.getLogDirSuffix) && StringUtils.isNotBlank(logIterator.getLogDirSuffix)){ + Utils.tryAndWarn { + // Update the linkis job info and store into database + linkisInfo.setLogDirSuffix(logIterator.getLogDirSuffix) + streamTask.setLinkisJobInfo(DWSHttpClient.jacksonJson.writeValueAsString(linkisInfo)); + streamTaskMapper.updateTask(streamTask) + } + } + case _ => + } } }{ case e: Exception => // Just warn the exception @@ -470,7 +485,7 @@ class DefaultStreamTaskService extends StreamTaskService with Logging{ 
}).asJava } - def getTask(jobId:Long, version: String): FlinkJobInfo ={ + def getTaskJobInfo(jobId:Long, version: String): FlinkJobInfo ={ val str = streamTaskMapper.getTask(jobId, version) if (StringUtils.isBlank(str)) { return new FlinkJobInfo @@ -511,6 +526,9 @@ class DefaultStreamTaskService extends StreamTaskService with Logging{ } } + + override def getLatestTaskByJobId(jobId: Long): StreamTask = streamTaskMapper.getLatestByJobId(jobId) + /** * Create new task use the latest job version * @@ -545,6 +563,8 @@ class DefaultStreamTaskService extends StreamTaskService with Logging{ } } + override def updateTask(streamTask: StreamTask): Unit = streamTaskMapper.updateTask(streamTask) + /** * Just launch task by task id * @@ -703,4 +723,15 @@ class DefaultStreamTaskService extends StreamTaskService with Logging{ } } + override def getJobDetailsVO(streamJob: StreamJob, version: String): JobDetailsVo = { + val flinkJobInfo = getTaskJobInfo(streamJob.getId, version) + val jobStateInfos = flinkJobInfo.getJobStates + val metricsStr = if (JobConf.SUPPORTED_MANAGEMENT_JOB_TYPES.getValue.contains(streamJob.getJobType)) null + else if(jobStateInfos == null || jobStateInfos.length == 0) null + else jobStateInfos(0).getLocation + taskMetricsParser.find(_.canParse(streamJob)).map(_.parse(metricsStr)).filter { jobDetailsVO => + jobDetailsVO.setLinkisJobInfo(flinkJobInfo) + true + }.getOrElse(throw new JobFetchErrorException(30030, s"Cannot find a TaskMetricsParser to parse job details.")) + } } diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/StreamJobService.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/StreamJobService.scala index 5f27f0864..5ac798177 100644 --- 
a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/StreamJobService.scala +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/StreamJobService.scala @@ -15,6 +15,9 @@ trait StreamJobService { def getJobById(jobId: Long): StreamJob + + def getJobByName(jobName: String): util.List[StreamJob] + /** * Page list query * @param projectName project name diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/StreamTaskService.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/StreamTaskService.scala index 079a8d8f8..26591d23f 100644 --- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/StreamTaskService.scala +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/StreamTaskService.scala @@ -18,9 +18,8 @@ package com.webank.wedatasphere.streamis.jobmanager.manager.service import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobState import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.entity.LogRequestPayload import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.FlinkJobInfo -import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamTask -import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.{ExecResultVo, JobProgressVo, JobStatusVo, PauseResultVo, StreamTaskListVo} - +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.{StreamJob, StreamTask} +import 
com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.{ExecResultVo, JobDetailsVo, JobProgressVo, JobStatusVo, PauseResultVo, StreamTaskListVo} import java.util import java.util.concurrent.Future /** @@ -87,6 +86,8 @@ trait StreamTaskService { */ def launch(taskId: Long, execUser: String): Unit + def getLatestTaskByJobId(jobId: Long): StreamTask + /** * Create new task use the latest job version * @param jobId job id @@ -95,6 +96,8 @@ trait StreamTaskService { */ def createTask(jobId: Long, status: Int, creator: String): StreamTask + def updateTask(streamTask: StreamTask): Unit + /** * Update the task status * @param jobId job id @@ -146,9 +149,11 @@ trait StreamTaskService { * @param version version * @return */ - def getTask(jobId: Long, version: String): FlinkJobInfo + def getTaskJobInfo(jobId: Long, version: String): FlinkJobInfo def getStateInfo(taskId: Long): JobState + def getJobDetailsVO(streamJob: StreamJob, version: String): JobDetailsVo + } diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/TaskMonitorService.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/TaskMonitorService.scala index edf36bb94..f6f250e6a 100644 --- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/TaskMonitorService.scala +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/TaskMonitorService.scala @@ -18,6 +18,7 @@ package com.webank.wedatasphere.streamis.jobmanager.manager.service import java.util import java.util.Date import java.util.concurrent.{Future, TimeUnit} + import com.google.common.collect.Sets import 
com.webank.wedatasphere.streamis.jobmanager.launcher.JobLauncherAutoConfiguration import com.webank.wedatasphere.streamis.jobmanager.launcher.conf.JobConfKeyConstants @@ -30,7 +31,6 @@ import com.webank.wedatasphere.streamis.jobmanager.manager.conf.JobConf import com.webank.wedatasphere.streamis.jobmanager.manager.dao.{StreamJobMapper, StreamTaskMapper} import com.webank.wedatasphere.streamis.jobmanager.manager.entity.{StreamJob, StreamTask} import com.webank.wedatasphere.streamis.jobmanager.manager.utils.StreamTaskUtils - import javax.annotation.{PostConstruct, PreDestroy, Resource} import org.apache.commons.lang.exception.ExceptionUtils import org.apache.linkis.common.exception.ErrorException @@ -84,58 +84,64 @@ class TaskMonitorService extends Logging { return } streamTasks.filter(shouldMonitor).foreach { streamTask => - streamTask.setLastUpdateTime(new Date) - streamTaskMapper.updateTask(streamTask) val job = streamJobMapper.getJobById(streamTask.getJobId) - info(s"Try to update status of StreamJob-${job.getName}.") - val retryHandler = new RetryHandler {} - retryHandler.setRetryNum(3) - retryHandler.setRetryMaxPeriod(2000) - retryHandler.addRetryException(classOf[ErrorException]) - var jobInfo:JobInfo = null - Utils.tryCatch { - jobInfo = retryHandler.retry(refresh(streamTask, jobLaunchManager), s"Task-Monitor-${job.getName}") - } { ex => { - error(s"Fetch StreamJob-${job.getName} failed, maybe the Linkis cluster is wrong, please be noticed!", ex) - val errorMsg = ExceptionUtils.getRootCauseMessage(ex) - if (errorMsg != null && errorMsg.contains("Not exists EngineConn")) { - streamTask.setStatus(JobConf.FLINK_JOB_STATUS_FAILED.getValue) - streamTask.setErrDesc("Not exists EngineConn.") - } else { - // 连续三次还是出现异常,说明Linkis的Manager已经不能正常提供服务,告警并不再尝试获取状态,等待下次尝试 - val users = getAlertUsers(job) - users.add(job.getCreateBy) - alert(jobService.getAlertLevel(job), s"请求LinkisManager失败,Linkis集群出现异常,请关注!影响任务[${job.getName}]", users, streamTask) - } - } - } - 
streamTaskMapper.updateTask(streamTask) - if(streamTask.getStatus == JobConf.FLINK_JOB_STATUS_FAILED.getValue) { - warn(s"StreamJob-${job.getName} is failed, please be noticed.") - var extraMessage = "" - Option(jobInfo) match { - case Some(flinkJobInfo: FlinkJobInfo) => - extraMessage = s",${flinkJobInfo.getApplicationId}" - case _ => - } - // Need to add restart feature if user sets the restart parameters. - var alertMsg = s"Streamis 流式应用[${job.getName}${extraMessage}]已经失败, 请登陆Streamis查看应用日志." - this.streamJobConfMapper.getRawConfValue(job.getId, JobConfKeyConstants.FAIL_RESTART_SWITCH.getValue) match { - case "ON" => - alertMsg = s"${alertMsg} 现将自动拉起该应用" - Utils.tryCatch{ - info(s"Start to reLaunch the StreamisJob [${job.getName}], now to submit and schedule it...") - // Use submit user to start job - val future: Future[String] = streamTaskService.asyncExecute(job.getId, 0L, job.getSubmitUser, true) - }{ - case e:Exception => - warn(s"Fail to reLaunch the StreamisJob [${job.getName}]", e) - } - case _ => - } + if(!JobConf.SUPPORTED_MANAGEMENT_JOB_TYPES.getValue.contains(job.getJobType)) { val userList = Sets.newHashSet(job.getSubmitUser, job.getCreateBy) userList.addAll(getAlertUsers(job)) + val alertMsg = s"Spark Streaming应用[${job.getName}]已经超过 ${Utils.msDurationToString(System.currentTimeMillis - streamTask.getLastUpdateTime.getTime)} 没有更新状态, 请及时确认应用是否正常!" 
alert(jobService.getAlertLevel(job), alertMsg, new util.ArrayList[String](userList), streamTask) + } else { + streamTask.setLastUpdateTime(new Date) + streamTaskMapper.updateTask(streamTask) + info(s"Try to update status of StreamJob-${job.getName}.") + val retryHandler = new RetryHandler {} + retryHandler.setRetryNum(3) + retryHandler.setRetryMaxPeriod(2000) + retryHandler.addRetryException(classOf[ErrorException]) + var jobInfo:JobInfo = null + Utils.tryCatch { + jobInfo = retryHandler.retry(refresh(streamTask, jobLaunchManager), s"Task-Monitor-${job.getName}") + } { ex => + error(s"Fetch StreamJob-${job.getName} failed, maybe the Linkis cluster is wrong, please be noticed!", ex) + val errorMsg = ExceptionUtils.getRootCauseMessage(ex) + if (errorMsg != null && errorMsg.contains("Not exists EngineConn")) { + streamTask.setStatus(JobConf.FLINK_JOB_STATUS_FAILED.getValue) + streamTask.setErrDesc("Not exists EngineConn.") + } else { + // 连续三次还是出现异常,说明Linkis的Manager已经不能正常提供服务,告警并不再尝试获取状态,等待下次尝试 + val users = getAlertUsers(job) + users.add(job.getCreateBy) + alert(jobService.getAlertLevel(job), s"请求LinkisManager失败,Linkis集群出现异常,请关注!影响任务[${job.getName}]", users, streamTask) + } + } + streamTaskMapper.updateTask(streamTask) + if(streamTask.getStatus == JobConf.FLINK_JOB_STATUS_FAILED.getValue) { + warn(s"StreamJob-${job.getName} is failed, please be noticed.") + var extraMessage = "" + Option(jobInfo) match { + case Some(flinkJobInfo: FlinkJobInfo) => + extraMessage = s",${flinkJobInfo.getApplicationId}" + case _ => + } + // Need to add restart feature if user sets the restart parameters. + var alertMsg = s"Streamis 流式应用[${job.getName}${extraMessage}]已经失败, 请登陆Streamis查看应用日志." 
+ this.streamJobConfMapper.getRawConfValue(job.getId, JobConfKeyConstants.FAIL_RESTART_SWITCH.getValue) match { + case "ON" => + alertMsg = s"${alertMsg} 现将自动拉起该应用" + Utils.tryCatch{ + info(s"Start to reLaunch the StreamisJob [${job.getName}], now to submit and schedule it...") + // Use submit user to start job + val future: Future[String] = streamTaskService.asyncExecute(job.getId, 0L, job.getSubmitUser, true) + }{ + case e:Exception => + warn(s"Fail to reLaunch the StreamisJob [${job.getName}]", e) + } + case _ => + } + val userList = Sets.newHashSet(job.getSubmitUser, job.getCreateBy) + userList.addAll(getAlertUsers(job)) + alert(jobService.getAlertLevel(job), alertMsg, new util.ArrayList[String](userList), streamTask) + } } } info("All StreamTasks status have updated.") diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/TaskMetricsParser.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/TaskMetricsParser.scala new file mode 100644 index 000000000..1e7ad7b68 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/TaskMetricsParser.scala @@ -0,0 +1,18 @@ +package com.webank.wedatasphere.streamis.jobmanager.manager.transform + +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJob +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.JobDetailsVo + +/** + * + * @date 2022-10-21 + * @author enjoyyin + * @since 0.5.0 + */ +trait TaskMetricsParser { + + def canParse(streamJob: StreamJob): Boolean + + def parse(metrics: String): JobDetailsVo + +} diff --git 
a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/builder/AbstractStreamisTransformJobBuilder.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/builder/AbstractStreamisTransformJobBuilder.scala index 039fbd3e2..622961329 100644 --- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/builder/AbstractStreamisTransformJobBuilder.scala +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/builder/AbstractStreamisTransformJobBuilder.scala @@ -15,15 +15,19 @@ package com.webank.wedatasphere.streamis.jobmanager.manager.transform.builder +import com.webank.wedatasphere.streamis.jobmanager.launcher.conf.JobConfKeyConstants import org.apache.linkis.common.conf.CommonVars import org.apache.linkis.manager.label.entity.engine.RunType.RunType import com.webank.wedatasphere.streamis.jobmanager.launcher.service.StreamJobConfService +import com.webank.wedatasphere.streamis.jobmanager.manager.conf.JobConf import com.webank.wedatasphere.streamis.jobmanager.manager.dao.StreamJobMapper import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJob import com.webank.wedatasphere.streamis.jobmanager.manager.transform.StreamisTransformJobBuilder import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.{StreamisJobEngineConnImpl, StreamisTransformJob, StreamisTransformJobContent, StreamisTransformJobImpl} import org.springframework.beans.factory.annotation.Autowired +import java.util +import scala.collection.JavaConverters.mapAsJavaMapConverter /** * Created by enjoyyin on 2021/9/22. 
*/ @@ -39,7 +43,13 @@ abstract class AbstractStreamisTransformJobBuilder extends StreamisTransformJobB override def build(streamJob: StreamJob): StreamisTransformJob = { val transformJob = createStreamisTransformJob() transformJob.setStreamJob(streamJob) - transformJob.setConfigMap(streamJobConfService.getJobConfig(streamJob.getId)) + val jobConfig: util.Map[String, Any] = Option(streamJobConfService.getJobConfig(streamJob.getId)) + .getOrElse(new util.HashMap[String, Any]()) + // Put and overwrite internal group, users cannot customize the internal configuration + val internalGroup = new util.HashMap[String, Any]() + jobConfig.put(JobConfKeyConstants.GROUP_INTERNAL.getValue, internalGroup) + internalLogConfig(internalGroup) + transformJob.setConfigMap(jobConfig) // transformJob.setConfig(configurationService.getFullTree(streamJob.getId)) val streamJobVersions = streamJobMapper.getJobVersions(streamJob.getId) // 无需判断streamJobVersions是否非空,因为TaskService已经判断了 @@ -48,6 +58,14 @@ abstract class AbstractStreamisTransformJobBuilder extends StreamisTransformJobB transformJob } + /** + * Log internal configuration + * @param internal internal config group + */ + private def internalLogConfig(internal: util.Map[String, Any]): Unit = { + internal.put(JobConf.STREAMIS_JOB_LOG_GATEWAY.key, JobConf.STREAMIS_JOB_LOG_GATEWAY.getValue) + internal.put(JobConf.STREAMIS_JOB_LOG_COLLECT_PATH.key, JobConf.STREAMIS_JOB_LOG_COLLECT_PATH.getValue) + } } abstract class AbstractFlinkStreamisTransformJobBuilder extends AbstractStreamisTransformJobBuilder{ diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/FlinkCheckpointConfigTransform.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/FlinkCheckpointConfigTransform.scala index 01426f656..7d797aaf5 100644 --- 
a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/FlinkCheckpointConfigTransform.scala +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/FlinkCheckpointConfigTransform.scala @@ -23,6 +23,7 @@ import com.webank.wedatasphere.streamis.jobmanager.launcher.job.LaunchJob import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobLaunchManager import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.Checkpoint import com.webank.wedatasphere.streamis.jobmanager.manager.transform.impl.FlinkCheckpointConfigTransform.CHECKPOINT_PATH_CONFIG_NAME +import org.apache.linkis.common.conf.CommonVars import org.apache.linkis.common.utils.Logging import scala.collection.JavaConverters._ @@ -62,5 +63,5 @@ class FlinkCheckpointConfigTransform extends FlinkConfigTransform with Logging{ } object FlinkCheckpointConfigTransform{ - val CHECKPOINT_PATH_CONFIG_NAME = "state.checkpoints.dir" + private val CHECKPOINT_PATH_CONFIG_NAME = CommonVars("wds.streamis.flink.config.name.checkpoint-path", "state.checkpoints.dir").getValue } diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/FlinkInternalConfigTransform.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/FlinkInternalConfigTransform.scala new file mode 100644 index 000000000..be9782734 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/FlinkInternalConfigTransform.scala @@ -0,0 +1,49 @@ +package 
com.webank.wedatasphere.streamis.jobmanager.manager.transform.impl +import com.webank.wedatasphere.streamis.jobmanager.launcher.conf.JobConfKeyConstants +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.LaunchJob +import com.webank.wedatasphere.streamis.jobmanager.manager.conf.JobConf +import com.webank.wedatasphere.streamis.jobmanager.manager.transform.impl.FlinkInternalConfigTransform.INTERNAL_CONFIG_MAP +import org.apache.linkis.common.conf.CommonVars + +import java.util +import scala.collection.JavaConverters.{mapAsJavaMapConverter, mapAsScalaMapConverter} + +/** + * Flink internal config transform + */ +class FlinkInternalConfigTransform extends FlinkConfigTransform { + + /** + * Config group name + * + * @return + */ + override protected def configGroup(): String = JobConfKeyConstants.GROUP_INTERNAL.getValue + + override protected def transform(internalConfig: util.Map[String, Any], job: LaunchJob): LaunchJob = { + transformConfig(internalConfig.asScala.map{ + case (key, value) => + (FlinkConfigTransform.FLINK_CONFIG_PREFIX + (INTERNAL_CONFIG_MAP.get(key) match { + case Some(mappingKey) => mappingKey + case _ => value + }), value) + }.asJava, job) + } +} + +object FlinkInternalConfigTransform { + /** + * Defined in FlinkStreamisConfigDefine.LOG_GATEWAY_ADDRESS of 'flink-streamis-log-collector' + */ + private val LOG_GATEWAY_CONFIG_NAME = CommonVars("wds.streamis.flink.config.name.log-gateway", "stream.log.gateway.address").getValue + + /** + * Defined in FlinkStreamisConfigDefine.LOG_GATEWAY_ADDRESS of 'flink-streamis-log-collector' + */ + private val LOG_COLLECT_PATH_CONFIG_NAME = CommonVars("wds.streamis.flink.config.name.log-collect-path", "stream.log.collect.path").getValue + + + val INTERNAL_CONFIG_MAP = Map(JobConf.STREAMIS_JOB_LOG_GATEWAY.key -> LOG_GATEWAY_CONFIG_NAME, + JobConf.STREAMIS_JOB_LOG_COLLECT_PATH.key -> LOG_COLLECT_PATH_CONFIG_NAME + ) +} diff --git 
a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/AbstractTaskMetricsParser.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/AbstractTaskMetricsParser.scala new file mode 100644 index 000000000..eb0de6d6e --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/AbstractTaskMetricsParser.scala @@ -0,0 +1,37 @@ +package com.webank.wedatasphere.streamis.jobmanager.manager.transform.parser + +import java.util + +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.JobDetailsVo +import com.webank.wedatasphere.streamis.jobmanager.manager.transform.TaskMetricsParser +import org.apache.commons.lang3.StringUtils +import org.apache.linkis.httpclient.dws.DWSHttpClient + +/** + * + * @date 2022-10-21 + * @author enjoyyin + * @since 0.5.0 + */ +trait AbstractTaskMetricsParser extends TaskMetricsParser { + + override def parse(metrics: String): JobDetailsVo = { + val jobDetailsVO = new JobDetailsVo + val dataNumberDTOS = new util.ArrayList[JobDetailsVo.DataNumberDTO] + val loadConditionDTOs = new util.ArrayList[JobDetailsVo.LoadConditionDTO] + val realTimeTrafficDTOS = new util.ArrayList[JobDetailsVo.RealTimeTrafficDTO] + jobDetailsVO.setDataNumber(dataNumberDTOS) + jobDetailsVO.setLoadCondition(loadConditionDTOs) + jobDetailsVO.setRealTimeTraffic(realTimeTrafficDTOS) + val metricsMap = if(StringUtils.isNotBlank(metrics)) DWSHttpClient.jacksonJson.readValue(metrics, classOf[util.Map[String, Object]]) + else new util.HashMap[String, Object](0) + parse(metricsMap, dataNumberDTOS, loadConditionDTOs, realTimeTrafficDTOS) + jobDetailsVO + } + + protected def parse(metricsMap: util.Map[String, Object], + dataNumberDTOS: 
util.List[JobDetailsVo.DataNumberDTO], + loadConditionDTOs: util.List[JobDetailsVo.LoadConditionDTO], + realTimeTrafficDTOS: util.List[JobDetailsVo.RealTimeTrafficDTO]): Unit + +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/FlinkTaskMetricsParser.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/FlinkTaskMetricsParser.scala new file mode 100644 index 000000000..48eb8ad06 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/FlinkTaskMetricsParser.scala @@ -0,0 +1,48 @@ +package com.webank.wedatasphere.streamis.jobmanager.manager.transform.parser + +import java.util + +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJob +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.JobDetailsVo +import org.springframework.stereotype.Component + +/** + * + * @date 2022-10-21 + * @author enjoyyin + * @since 0.5.0 + */ +@Component +class FlinkTaskMetricsParser extends AbstractTaskMetricsParser { + + override def canParse(streamJob: StreamJob): Boolean = streamJob.getJobType.startsWith("flink.") + + override def parse(metricsMap: util.Map[String, Object], + dataNumberDTOS: util.List[JobDetailsVo.DataNumberDTO], + loadConditionDTOs: util.List[JobDetailsVo.LoadConditionDTO], + realTimeTrafficDTOS: util.List[JobDetailsVo.RealTimeTrafficDTO]): Unit = { + // TODO This is just sample data, waiting to be completed. We have planned this for a later release; we welcome all partners to join us in realizing this powerful feature.
+ val dataNumberDTO = new JobDetailsVo.DataNumberDTO + dataNumberDTO.setDataName("kafka topic") + dataNumberDTO.setDataNumber(109345) + dataNumberDTOS.add(dataNumberDTO) + + val loadConditionDTO = new JobDetailsVo.LoadConditionDTO + loadConditionDTO.setType("jobManager") + loadConditionDTO.setHost("localhost") + loadConditionDTO.setMemory("1.5") + loadConditionDTO.setTotalMemory("2.0") + loadConditionDTO.setGcLastTime("2020-08-01") + loadConditionDTO.setGcLastConsume("1") + loadConditionDTO.setGcTotalTime("2min") + loadConditionDTOs.add(loadConditionDTO) + + val realTimeTrafficDTO = new JobDetailsVo.RealTimeTrafficDTO + realTimeTrafficDTO.setSourceKey("kafka topic") + realTimeTrafficDTO.setSourceSpeed("100 Records/S") + realTimeTrafficDTO.setTransformKey("transform") + realTimeTrafficDTO.setSinkKey("hbase key") + realTimeTrafficDTO.setSinkSpeed("10 Records/S") + realTimeTrafficDTOS.add(realTimeTrafficDTO) + } +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/SparkJarJobContentParser.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/SparkJarJobContentParser.scala new file mode 100644 index 000000000..d730753c2 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/SparkJarJobContentParser.scala @@ -0,0 +1,78 @@ +package com.webank.wedatasphere.streamis.jobmanager.manager.transform.parser + +import java.util + +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.{StreamJob, StreamJobVersion, StreamJobVersionFiles, StreamisFile} +import com.webank.wedatasphere.streamis.jobmanager.manager.exception.JobExecuteErrorException +import 
com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.{StreamisJarTransformJobContent, StreamisTransformJobContent} +import org.apache.commons.lang.StringUtils +import org.apache.linkis.common.utils.JsonUtils +import org.apache.linkis.manager.label.entity.engine.RunType +import org.apache.linkis.manager.label.entity.engine.RunType.RunType +import org.springframework.stereotype.Component + +import scala.collection.JavaConverters._ + +/** + * + * @date 2022-10-19 + * @author enjoyyin + * @since 0.5.0 + */ +@Component +class SparkJarJobContentParser extends AbstractJobContentParser { + + override val jobType: String = "spark.jar" + override val runType: RunType = RunType.JAR + + override def parseTo(job: StreamJob, jobVersion: StreamJobVersion): StreamisTransformJobContent = { + val createFile: String => StreamisFile = fileName => { + val file = new StreamJobVersionFiles() + file.setFileName(fileName) + file.setCreateBy(job.getCreateBy) + file.setCreateTime(job.getCreateTime) + file.setJobId(job.getId) + file.setJobVersionId(jobVersion.getId) + file.setVersion(jobVersion.getVersion) + file.setStorePath("") + file.setStoreType("") + file + } + val transformJobContent = new StreamisJarTransformJobContent + val jobContent = JsonUtils.jackson.readValue(jobVersion.getJobContent, classOf[util.Map[String, Object]]) + jobContent.get("main.class.jar") match { + case mainClassJar: String => + transformJobContent.setMainClassJar(createFile(mainClassJar)) + case _ => throw new JobExecuteErrorException(30500, "main.class.jar is needed.") + } + jobContent.get("main.class") match { + case mainClass: String => + transformJobContent.setMainClass(mainClass) + case _ => throw new JobExecuteErrorException(30500, "main.class is needed.") + } + jobContent.get("args") match { + case args: util.List[String] => + transformJobContent.setArgs(args) + case _ => + } + jobContent.get("hdfs.jars") match { + case hdfsJars: util.List[String] => + 
transformJobContent.setHdfsJars(hdfsJars) + case _ => + } + jobContent.get("dependency.jars") match { + case dependencyJars: util.List[String] => + val parsedDependencyJars = dependencyJars.asScala.filter(StringUtils.isNotBlank).map(createFile).asJava + transformJobContent.setDependencyJars(parsedDependencyJars) + case _ => + } + jobContent.get("resources") match { + case resources: util.List[String] => + val parsedResources = resources.asScala.filter(StringUtils.isNotBlank).map(createFile).asJava + transformJobContent.setResources(parsedResources) + case _ => + } + transformJobContent + } + +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/SparkTaskMetricsParser.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/SparkTaskMetricsParser.scala new file mode 100644 index 000000000..c6e00d7ff --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/SparkTaskMetricsParser.scala @@ -0,0 +1,86 @@ +package com.webank.wedatasphere.streamis.jobmanager.manager.transform.parser + +import java.util + +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJob +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.JobDetailsVo +import org.apache.linkis.common.utils.Utils +import org.springframework.stereotype.Component + +import scala.collection.JavaConverters._ + +/** + * + * @date 2022-10-21 + * @author enjoyyin + * @since 0.5.0 + */ +@Component +class SparkTaskMetricsParser extends AbstractTaskMetricsParser { + + override protected def parse(metricsMap: util.Map[String, Object], + dataNumberDTOS: util.List[JobDetailsVo.DataNumberDTO], + loadConditionDTOs: util.List[JobDetailsVo.LoadConditionDTO], + 
realTimeTrafficDTOS: util.List[JobDetailsVo.RealTimeTrafficDTO]): Unit = { + val addDataNumberDTO: String => Unit = key => { + val batch = new JobDetailsVo.DataNumberDTO + batch.setDataName(key) + batch.setDataNumber(metricsMap.get(key) match { + case null => -1 + case num => num.toString.toInt + }) + dataNumberDTOS.add(batch) + } + addDataNumberDTO("waitingBatchs") + addDataNumberDTO("runningBatchs") + addDataNumberDTO("completedBatchs") + metricsMap.get("executors") match { + case executors: util.List[util.Map[String, AnyRef]] if !executors.isEmpty => + executors.asScala.foreach { executor => + val loadConditionDTO = new JobDetailsVo.LoadConditionDTO + loadConditionDTO.setType(executor.get("type").asInstanceOf[String]) + loadConditionDTO.setHost(executor.get("host").asInstanceOf[String]) + loadConditionDTO.setMemory(executor.get("memory").asInstanceOf[String]) + loadConditionDTO.setTotalMemory(executor.get("totalMemory").asInstanceOf[String]) + loadConditionDTO.setGcLastTime(executor.get("gcLastTime").asInstanceOf[String]) + loadConditionDTO.setGcLastConsume(executor.get("gcLastConsume").asInstanceOf[String]) + loadConditionDTO.setGcTotalTime(executor.get("gcTotalTime").asInstanceOf[String]) + loadConditionDTOs.add(loadConditionDTO) + } + case _ => + val loadConditionDTO = new JobDetailsVo.LoadConditionDTO + loadConditionDTO.setType("Driver") + loadConditionDTO.setHost("") + loadConditionDTO.setMemory("") + loadConditionDTO.setTotalMemory("") + loadConditionDTO.setGcLastTime("") + loadConditionDTO.setGcLastConsume("") + loadConditionDTO.setGcTotalTime("") + loadConditionDTOs.add(loadConditionDTO) + } + val realTimeTrafficDTO = new JobDetailsVo.RealTimeTrafficDTO + metricsMap.get("batchMetrics") match { + case batchMetrics: util.List[util.Map[String, Object]] if !batchMetrics.isEmpty => + val batchMetric = batchMetrics.asScala.maxBy(_.get("batchTime").asInstanceOf[String]) + realTimeTrafficDTO.setSourceKey(metricsMap.getOrDefault("source", 
"").asInstanceOf[String]) + realTimeTrafficDTO.setSourceSpeed(batchMetric.get("inputRecords") + " Records") + realTimeTrafficDTO.setTransformKey("processing") + realTimeTrafficDTO.setSinkKey(metricsMap.getOrDefault("sink", "").asInstanceOf[String]) + val sinkSpeed = if (batchMetric.containsKey("totalDelay") && batchMetric.get("totalDelay") != null) + Utils.msDurationToString(batchMetric.get("totalDelay").toString.toInt) + " totalDelay" + else if (batchMetric.containsKey("taskExecuteTime") && batchMetric.get("taskExecuteTime") != null) + Utils.msDurationToString(batchMetric.get("taskExecuteTime").toString.toInt) + " executeTime(Last Batch)" + else "" + realTimeTrafficDTO.setSinkSpeed(sinkSpeed) + case _ => + realTimeTrafficDTO.setSourceKey("") + realTimeTrafficDTO.setSourceSpeed(" Records/S") + realTimeTrafficDTO.setTransformKey("") + realTimeTrafficDTO.setSinkKey("") + realTimeTrafficDTO.setSinkSpeed(" Records/S") + } + realTimeTrafficDTOS.add(realTimeTrafficDTO) + } + + override def canParse(streamJob: StreamJob): Boolean = streamJob.getJobType.startsWith("spark.") +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/utils/JobUtils.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/utils/JobUtils.scala index 485f9c8ce..75a62b53d 100644 --- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/utils/JobUtils.scala +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/utils/JobUtils.scala @@ -1,5 +1,7 @@ package com.webank.wedatasphere.streamis.jobmanager.manager.utils +import com.webank.wedatasphere.streamis.jobmanager.manager.conf.JobConf + import java.util import 
scala.collection.JavaConverters.{asScalaSetConverter, mapAsScalaMapConverter} @@ -12,7 +14,7 @@ object JobUtils { for (paramEntry <- params.entrySet().asScala){ val value = paramEntry.getValue value match { - case str: String => paramEntry.setValue(str.replace(" ", "\\0x001")) + case str: String => paramEntry.setValue(str.replace(" ", JobConf.STREAMIS_JOB_PARAM_BLANK_PLACEHOLDER.getValue)) case _ => } } diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/JobRestfulApi.java b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/JobRestfulApi.java index 6d6591d74..bb84d42f7 100644 --- a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/JobRestfulApi.java +++ b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/JobRestfulApi.java @@ -15,23 +15,31 @@ package com.webank.wedatasphere.streamis.jobmanager.restful.api; +import com.fasterxml.jackson.core.JsonProcessingException; import com.github.pagehelper.PageHelper; import com.github.pagehelper.PageInfo; import com.webank.wedatasphere.streamis.jobmanager.exception.JobException; import com.webank.wedatasphere.streamis.jobmanager.exception.JobExceptionManager; import com.webank.wedatasphere.streamis.jobmanager.launcher.job.JobInfo; import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobLaunchManager; +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobStateInfo; import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.entity.LogRequestPayload; +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.FlinkJobInfo; +import com.webank.wedatasphere.streamis.jobmanager.manager.conf.JobConf; import com.webank.wedatasphere.streamis.jobmanager.manager.entity.MetaJsonInfo; import 
com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJob; import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJobVersion; +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamTask; import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.*; import com.webank.wedatasphere.streamis.jobmanager.manager.project.service.ProjectPrivilegeService; import com.webank.wedatasphere.streamis.jobmanager.manager.service.StreamJobService; import com.webank.wedatasphere.streamis.jobmanager.manager.service.StreamTaskService; import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.StreamisTransformJobContent; +import com.webank.wedatasphere.streamis.jobmanager.manager.utils.StreamTaskUtils; +import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.exception.ExceptionUtils; import org.apache.commons.lang3.StringUtils; +import org.apache.linkis.httpclient.dws.DWSHttpClient; import org.apache.linkis.server.Message; import org.apache.linkis.server.security.SecurityFilter; import org.slf4j.Logger; @@ -42,11 +50,9 @@ import javax.annotation.Resource; import javax.servlet.http.HttpServletRequest; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Objects; +import java.util.*; +import java.util.function.Function; +import java.util.stream.Collectors; @RequestMapping(path = "/streamis/streamJobManager/job") @RestController @@ -73,7 +79,7 @@ public Message getJobList(HttpServletRequest req, @RequestParam(value = "projectName", required = false) String projectName, @RequestParam(value = "jobName", required = false) String jobName, @RequestParam(value = "jobStatus", required = false) Integer jobStatus, - @RequestParam(value = "jobCreator", required = false) String jobCreator) throws JobException { + @RequestParam(value = "jobCreator", required = false) String jobCreator) { String username = 
SecurityFilter.getLoginUsername(req); if(StringUtils.isBlank(projectName)){ return Message.error("Project name cannot be empty(项目名不能为空,请指定)"); @@ -96,7 +102,7 @@ public Message getJobList(HttpServletRequest req, } @RequestMapping(path = "/createOrUpdate", method = RequestMethod.POST) - public Message createOrUpdate(HttpServletRequest req, @Validated @RequestBody MetaJsonInfo metaJsonInfo) throws Exception { + public Message createOrUpdate(HttpServletRequest req, @Validated @RequestBody MetaJsonInfo metaJsonInfo) { String username = SecurityFilter.getLoginUsername(req); String projectName = metaJsonInfo.getProjectName(); if (StringUtils.isBlank(projectName)){ @@ -138,6 +144,11 @@ public Message executeJob(HttpServletRequest req, @RequestBody Map dataNumberDTOS = new ArrayList<>(); - JobDetailsVo.DataNumberDTO dataNumberDTO = new JobDetailsVo.DataNumberDTO(); - dataNumberDTO.setDataName("kafka topic"); - dataNumberDTO.setDataNumber(109345); - dataNumberDTOS.add(dataNumberDTO); - - List loadConditionDTOs = new ArrayList<>(); - JobDetailsVo.LoadConditionDTO loadConditionDTO = new JobDetailsVo.LoadConditionDTO(); - loadConditionDTO.setType("jobManager"); - loadConditionDTO.setHost("localhost"); - loadConditionDTO.setMemory("1.5"); - loadConditionDTO.setTotalMemory("2.0"); - loadConditionDTO.setGcLastTime("2020-08-01"); - loadConditionDTO.setGcLastConsume("1"); - loadConditionDTO.setGcTotalTime("2min"); - loadConditionDTOs.add(loadConditionDTO); - - List realTimeTrafficDTOS = new ArrayList<>(); - JobDetailsVo.RealTimeTrafficDTO realTimeTrafficDTO = new JobDetailsVo.RealTimeTrafficDTO(); - realTimeTrafficDTO.setSourceKey("kafka topic"); - realTimeTrafficDTO.setSourceSpeed("100 Records/S"); - realTimeTrafficDTO.setTransformKey("transform"); - realTimeTrafficDTO.setSinkKey("hbase key"); - realTimeTrafficDTO.setSinkSpeed("10 Records/S"); - realTimeTrafficDTOS.add(realTimeTrafficDTO); - - - jobDetailsVO.setLinkisJobInfo(streamTaskService.getTask(jobId,version)); - 
jobDetailsVO.setDataNumber(dataNumberDTOS); - jobDetailsVO.setLoadCondition(loadConditionDTOs); - jobDetailsVO.setRealTimeTraffic(realTimeTrafficDTOS); - - return Message.ok().data("details", jobDetailsVO); + String username = SecurityFilter.getLoginUsername(req); + StreamJob streamJob = streamJobService.getJobById(jobId); + if(streamJob == null) { + return Message.error("not exists job " + jobId); + } + if (!streamJobService.hasPermission(streamJob, username) && + !this.privilegeService.hasAccessPrivilege(req, streamJob.getProjectName())) { + return Message.error("Have no permission to get Job details of StreamJob [" + jobId + "]"); + } + return Message.ok().data("details", streamTaskService.getJobDetailsVO(streamJob, version)); } @RequestMapping(path = "/execute/history", method = RequestMethod.GET) public Message executeHistoryJob(HttpServletRequest req, @RequestParam(value = "jobId", required = false) Long jobId, - @RequestParam(value = "version", required = false) String version) throws IOException, JobException { + @RequestParam(value = "version", required = false) String version) throws JobException { String username = SecurityFilter.getLoginUsername(req); if (jobId == null) { throw JobExceptionManager.createException(30301, "jobId"); } @@ -238,14 +232,204 @@ public Message executeHistoryJob(HttpServletRequest req, return Message.ok().data("details", details); } + private Message withStreamJob(HttpServletRequest req, String projectName, + String jobName, String username, + Function streamJobFunction) { + if(StringUtils.isBlank(projectName)) { + return Message.error("projectName cannot be empty!"); + } else if(StringUtils.isBlank(jobName)) { + return Message.error("jobName cannot be empty!"); + } + List streamJobs = streamJobService.getByProList(projectName, username, jobName, null, null).getList(); + if(CollectionUtils.isEmpty(streamJobs)) { + return Message.error("Not exists Streamis job " + jobName); + } else if(streamJobs.size() > 1) { + return
Message.error("Too many Streamis Job named " + jobName + ", we cannot distinguish between them."); + } else if(!"spark.jar".equals(streamJobs.get(0).getJobType())) { + return Message.error("Only spark.jar Job support to manage task."); + } + StreamJob streamJob = streamJobService.getJobById(streamJobs.get(0).getId()); + if (!streamJobService.hasPermission(streamJob, username) && + !this.privilegeService.hasEditPrivilege(req, streamJob.getProjectName())) { + return Message.error("Have no permission to operate task for StreamJob [" + jobName + "]."); + } + return streamJobFunction.apply(streamJob); + } + + @RequestMapping(path = "/addTask", method = RequestMethod.GET) + public Message addTask(HttpServletRequest req, + @RequestParam(value = "projectName") String projectName, + @RequestParam(value = "jobName") String jobName, + @RequestParam(value = "appId") String appId, + @RequestParam(value = "appUrl") String appUrl) { + String username = SecurityFilter.getLoginUsername(req); + LOG.info("User {} try to add a new task for Streamis job {}.{} with appId: {}, appUrl: {}.", username, projectName, jobName, appId, appUrl); + if(StringUtils.isBlank(appId)) { + return Message.error("appId cannot be empty!"); + } + return withStreamJob(req, projectName, jobName, username, streamJob -> { + // 如果存在正在运行的,先将其停止掉 + StreamTask streamTask = streamTaskService.getLatestTaskByJobId(streamJob.getId()); + if(streamTask != null && JobConf.isRunning(streamTask.getStatus())) { + LOG.warn("Streamis Job {} exists running task, update its status from Running to stopped at first.", jobName); + streamTask.setStatus((Integer) JobConf.FLINK_JOB_STATUS_STOPPED().getValue()); + streamTask.setErrDesc("stopped by App's new task."); + streamTaskService.updateTask(streamTask); + } + if(streamTask == null || StringUtils.isBlank(streamTask.getLinkisJobInfo())) { + // 这里取个巧,从该工程该用户有权限的Job中找到一个Flink的历史作业,作为这个Spark Streaming作业的jobId和jobInfo + // 替换掉JobInfo中的 yarn 信息,这样我们前端就可以在不修改任何逻辑的情况下正常展示Spark 
Streaming作业了 + PageInfo jobList = streamJobService.getByProList(streamJob.getProjectName(), username, null, null, null); + List copyJobs = jobList.getList().stream().filter(job -> !job.getJobType().startsWith("spark.")) + .collect(Collectors.toList()); + if(copyJobs.isEmpty()) { + return Message.error("no Flink Job has been submitted, the register to Streamis cannot be succeeded."); + } + int index = 0; + streamTask = null; + while(streamTask == null && index < copyJobs.size()) { + StreamTask copyTask = streamTaskService.getLatestTaskByJobId(copyJobs.get(index).getId()); + if(copyTask == null || StringUtils.isBlank(copyTask.getLinkisJobInfo())) { + index ++; + } else { + LOG.warn("Streamis Job {} will bind the linkisJobInfo from history Flink Job {} with linkisJobId: {}, linkisJobInfo: {}.", + jobName, copyJobs.get(index).getName(), copyTask.getLinkisJobId(), copyTask.getLinkisJobInfo()); + streamTask = streamTaskService.createTask(streamJob.getId(), (Integer) JobConf.FLINK_JOB_STATUS_RUNNING().getValue(), username); + streamTask.setLinkisJobId(copyTask.getLinkisJobId()); + streamTask.setLinkisJobInfo(copyTask.getLinkisJobInfo()); + } + } + if(streamTask == null) { + return Message.error("no Flink task has been executed, the register to Streamis cannot be succeeded."); + } + } else { + StreamTask newStreamTask = streamTaskService.createTask(streamJob.getId(), (Integer) JobConf.FLINK_JOB_STATUS_RUNNING().getValue(), username); + streamTask.setId(newStreamTask.getId()); + streamTask.setVersion(newStreamTask.getVersion()); + streamTask.setErrDesc(""); + streamTask.setStatus(newStreamTask.getStatus()); + streamTask.setSubmitUser(username); + } + streamTask.setStartTime(new Date()); + streamTask.setLastUpdateTime(new Date()); + StreamTask finalStreamTask = streamTask; + return withFlinkJobInfo(jobName, streamTask.getLinkisJobInfo(), flinkJobInfo -> { + flinkJobInfo.setApplicationId(appId); + flinkJobInfo.setApplicationUrl(appUrl); + flinkJobInfo.setName(jobName); + 
flinkJobInfo.setStatus(JobConf.getStatusString(finalStreamTask.getStatus())); + StreamTaskUtils.refreshInfo(finalStreamTask, flinkJobInfo); + streamTaskService.updateTask(finalStreamTask); + LOG.info("Streamis Job {} has added a new task successfully.", jobName); + return Message.ok(); + }); + }); + } + + private Message withFlinkJobInfo(String jobName, String flinkJobInfoStr, Function flinkJobInfoFunction) { + FlinkJobInfo flinkJobInfo; + try { + flinkJobInfo = DWSHttpClient.jacksonJson().readValue(flinkJobInfoStr, FlinkJobInfo.class); + } catch (JsonProcessingException e) { + LOG.error("Job {} deserialize the flinkJobInfo string to object failed!", jobName, e); + return Message.error("Deserialize the flinkJobInfo string to object failed!"); + } + return flinkJobInfoFunction.apply(flinkJobInfo); + } + + @RequestMapping(path = "/updateTask", method = RequestMethod.GET) + public Message updateTask(HttpServletRequest req, + @RequestParam(value = "projectName") String projectName, + @RequestParam(value = "jobName") String jobName, + @RequestParam(value = "appId") String appId, + @RequestParam(value = "metrics") String metrics) { + String username = SecurityFilter.getLoginUsername(req); + LOG.info("User {} try to update task for Streamis job {}.{} with appId: {}, metrics: {}.", username, projectName, jobName, appId, metrics); + return withStreamJob(req, projectName, jobName, username, streamJob -> { + StreamTask streamTask = streamTaskService.getLatestTaskByJobId(streamJob.getId()); + if (streamTask == null) { + LOG.warn("Job {} is not exists running task, ignore to update its metrics.", jobName); + return Message.ok("not exists running task, ignore it."); + } else if (JobConf.isCompleted(streamTask.getStatus())) { + LOG.warn("The task of job {} is completed, ignore to update its metrics.", jobName); + return Message.ok("Task is completed, ignore to update its metrics."); + } + return withFlinkJobInfo(jobName, streamTask.getLinkisJobInfo(), flinkJobInfo -> { + if 
(!flinkJobInfo.getApplicationId().equals(appId)) { + LOG.warn("Job {} with running task appId {} is not equal to the request appId: {}, ignore to update its metrics.", + jobName, flinkJobInfo.getApplicationId(), appId); + return Message.ok("the request appId is not equals to the running task appId " + flinkJobInfo.getApplicationId()); + } + JobStateInfo jobStateInfo = new JobStateInfo(); + jobStateInfo.setTimestamp(System.currentTimeMillis()); + jobStateInfo.setLocation(metrics); + flinkJobInfo.setJobStates(new JobStateInfo[]{jobStateInfo}); + StreamTaskUtils.refreshInfo(streamTask, flinkJobInfo); + streamTaskService.updateTask(streamTask); + LOG.info("Streamis Job {} has updated the task metrics successfully.", jobName); + return Message.ok(); + }); + }); + } + + @RequestMapping(path = "/updateTask", method = RequestMethod.POST) + public Message updateTask(HttpServletRequest req, + @RequestBody Map json) { + String projectName = json.get("projectName"); + String jobName = json.get("jobName"); + String appId = json.get("appId"); + String metrics = json.get("metrics"); + return updateTask(req, projectName, jobName, appId, metrics); + } + + @RequestMapping(path = "/stopTask", method = RequestMethod.GET) + public Message stopTask(HttpServletRequest req, + @RequestParam(value = "projectName") String projectName, + @RequestParam(value = "jobName") String jobName, + @RequestParam(value = "appId") String appId, + @RequestParam(value = "appUrl") String appUrl) { + String username = SecurityFilter.getLoginUsername(req); + LOG.info("User {} try to stop task for Streamis job {}.{} with appId: {}, appUrl: {}.", username, projectName, jobName, appId, appUrl); + return withStreamJob(req, projectName, jobName, username, + streamJob -> tryStopTask(streamJob, appId)); + } + + private Message tryStopTask(StreamJob streamJob, String appId) { + // 如果存在正在运行的,将其停止掉 + StreamTask streamTask = streamTaskService.getLatestTaskByJobId(streamJob.getId()); + if(streamTask != null &&
JobConf.isRunning(streamTask.getStatus())) { + return withFlinkJobInfo(streamJob.getName(), streamTask.getLinkisJobInfo(), flinkJobInfo -> { + if(appId == null || flinkJobInfo.getApplicationId().equals(appId)) { + LOG.warn("Streamis Job {} is exists running task, update its status to stopped.", streamJob.getName()); + streamTask.setStatus((Integer) JobConf.FLINK_JOB_STATUS_STOPPED().getValue()); + streamTask.setErrDesc("stopped by App itself."); + streamTaskService.updateTask(streamTask); + return Message.ok(); + } else { + LOG.warn("Job {} with running task appId {} is not equal to the request appId: {}, ignore to stop it.", + streamJob.getName(), flinkJobInfo.getApplicationId(), appId); + return Message.ok("the request appId is not equals to the running task appId " + flinkJobInfo.getApplicationId()); + } + }); + } else { + LOG.warn("Streamis Job {} is not exists running task, ignore to stop it.", streamJob.getName()); + return Message.ok(); + } + } + @RequestMapping(path = "/progress", method = RequestMethod.GET) public Message progressJob(HttpServletRequest req, @RequestParam(value = "jobId", required = false) Long jobId, - @RequestParam(value = "version", required = false) String version) throws IOException, JobException { + @RequestParam(value = "version", required = false) String version) throws JobException { String username = SecurityFilter.getLoginUsername(req); if (jobId == null) { throw JobExceptionManager.createException(30301, "jobId"); } StreamJob streamJob = this.streamJobService.getJobById(jobId); + if(streamJob == null) { + return Message.error("not exists job " + jobId); + } else if(!JobConf.SUPPORTED_MANAGEMENT_JOB_TYPES().getValue().contains(streamJob.getJobType())) { + return Message.error("Job " + streamJob.getName() + " is not supported to get progress."); + } if (!streamJobService.hasPermission(streamJob, username) && + !this.privilegeService.hasAccessPrivilege(req, streamJob.getProjectName())) { + return Message.error("Have no permission to view the
progress of StreamJob [" + jobId + "]"); @@ -271,7 +455,6 @@ public Message uploadDetailsJob(HttpServletRequest req, @RequestParam(value = "j public Message getAlert(HttpServletRequest req, @RequestParam(value = "jobId", required = false) Long jobId, @RequestParam(value = "version", required = false) String version) { String username = SecurityFilter.getLoginUsername(req); - return Message.ok().data("list", streamJobService.getAlert(username, jobId, version)); } @@ -291,6 +474,12 @@ public Message getLog(HttpServletRequest req, logType = StringUtils.isBlank(logType) ? "client" : logType; String username = SecurityFilter.getLoginUsername(req); StreamJob streamJob = this.streamJobService.getJobById(jobId); + if(streamJob == null) { + return Message.error("not exists job " + jobId); + } else if(!JobConf.SUPPORTED_MANAGEMENT_JOB_TYPES().getValue().contains(streamJob.getJobType()) && + "client".equals(logType)) { + return Message.error("Job " + streamJob.getName() + " is not supported to get client logs."); + } if (!streamJobService.hasPermission(streamJob, username) && !this.privilegeService.hasAccessPrivilege(req, streamJob.getProjectName())) { return Message.error("Have no permission to fetch logs from StreamJob [" + jobId + "]"); @@ -336,6 +525,11 @@ public Message snapshot(@PathVariable("jobId")Long jobId, HttpServletRequest req try{ String username = SecurityFilter.getLoginUsername(request); StreamJob streamJob = this.streamJobService.getJobById(jobId); + if(streamJob == null) { + return Message.error("not exists job " + jobId); + } else if(!JobConf.SUPPORTED_MANAGEMENT_JOB_TYPES().getValue().contains(streamJob.getJobType())) { + return Message.error("Job " + streamJob.getName() + " is not supported to do snapshot."); + } if (!streamJobService.hasPermission(streamJob, username) && !this.privilegeService.hasEditPrivilege(request, streamJob.getProjectName())){ return Message.error("Have no permission to do snapshot for StreamJob [" + jobId + "]"); diff --git 
a/streamis-server/pom.xml b/streamis-server/pom.xml index 701457496..1bf8ecfba 100644 --- a/streamis-server/pom.xml +++ b/streamis-server/pom.xml @@ -69,6 +69,12 @@ ${streamis.version} + + + com.webank.wedatasphere.streamis + streamis-job-log-server + ${streamis.version} + diff --git a/streamis-server/src/main/resources/linkis.properties b/streamis-server/src/main/resources/linkis.properties index b80410c10..b09dbbc4a 100644 --- a/streamis-server/src/main/resources/linkis.properties +++ b/streamis-server/src/main/resources/linkis.properties @@ -34,7 +34,8 @@ wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.streamis.datasou com.webank.wedatasphere.streamis.project.server.restful,\ com.webank.wedatasphere.streamis.jobmanager.restful.api,\ com.webank.wedatasphere.streamis.datasource.execute.rest,\ - com.webank.wedatasphere.streamis.projectmanager.restful.api + com.webank.wedatasphere.streamis.projectmanager.restful.api,\ + com.webank.wedatasphere.streamis.jobmanager.log.server.restful ##mybatis wds.linkis.server.mybatis.mapperLocations=\ classpath*:com/webank/wedatasphere/streamis/datasource/manager/dao/impl/*.xml,\