Skip to content

Commit 73a89ba

Browse files
committed
Use the scala-logging wrapper instead of using the slf4j API directly.
1 parent 0da07da commit 73a89ba

File tree

35 files changed

+151
-97
lines changed

35 files changed

+151
-97
lines changed

core/pom.xml

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -98,6 +98,10 @@
9898
<groupId>org.slf4j</groupId>
9999
<artifactId>jcl-over-slf4j</artifactId>
100100
</dependency>
101+
<dependency>
102+
<groupId>com.typesafe.scala-logging</groupId>
103+
<artifactId>scala-logging-slf4j_${scala.binary.version}</artifactId>
104+
</dependency>
101105
<dependency>
102106
<groupId>log4j</groupId>
103107
<artifactId>log4j</artifactId>

core/src/main/scala/org/apache/spark/Logging.scala

Lines changed: 24 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -18,8 +18,9 @@
1818
package org.apache.spark
1919

2020
import org.apache.log4j.{LogManager, PropertyConfigurator}
21-
import org.slf4j.{Logger, LoggerFactory}
21+
import org.slf4j.LoggerFactory
2222
import org.slf4j.impl.StaticLoggerBinder
23+
import com.typesafe.scalalogging.slf4j.Logger
2324

2425
import org.apache.spark.annotation.DeveloperApi
2526
import org.apache.spark.util.Utils
@@ -39,61 +40,69 @@ trait Logging {
3940
// be serialized and used on another machine
4041
@transient private var log_ : Logger = null
4142

43+
// Method to get the logger name for this object
44+
protected def logName = {
45+
var className = this.getClass.getName
46+
// Ignore trailing $'s in the class names for Scala objects
47+
if (className.endsWith("$")) {
48+
className = className.substring(0, className.length - 1)
49+
}
50+
className
51+
}
52+
4253
// Method to get or create the logger for this object
4354
protected def log: Logger = {
4455
if (log_ == null) {
4556
initializeIfNecessary()
46-
var className = this.getClass.getName
47-
// Ignore trailing $'s in the class names for Scala objects
48-
log_ = LoggerFactory.getLogger(className.stripSuffix("$"))
57+
log_ = Logger(LoggerFactory.getLogger(logName))
4958
}
5059
log_
5160
}
5261

5362
// Log methods that take only a String
5463
protected def logInfo(msg: => String) {
55-
if (log.isInfoEnabled) log.info(msg)
64+
log.info(msg)
5665
}
5766

5867
protected def logDebug(msg: => String) {
59-
if (log.isDebugEnabled) log.debug(msg)
68+
log.debug(msg)
6069
}
6170

6271
protected def logTrace(msg: => String) {
63-
if (log.isTraceEnabled) log.trace(msg)
72+
log.trace(msg)
6473
}
6574

6675
protected def logWarning(msg: => String) {
67-
if (log.isWarnEnabled) log.warn(msg)
76+
log.warn(msg)
6877
}
6978

7079
protected def logError(msg: => String) {
71-
if (log.isErrorEnabled) log.error(msg)
80+
log.error(msg)
7281
}
7382

7483
// Log methods that take Throwables (Exceptions/Errors) too
7584
protected def logInfo(msg: => String, throwable: Throwable) {
76-
if (log.isInfoEnabled) log.info(msg, throwable)
85+
log.info(msg, throwable)
7786
}
7887

7988
protected def logDebug(msg: => String, throwable: Throwable) {
80-
if (log.isDebugEnabled) log.debug(msg, throwable)
89+
log.debug(msg, throwable)
8190
}
8291

8392
protected def logTrace(msg: => String, throwable: Throwable) {
84-
if (log.isTraceEnabled) log.trace(msg, throwable)
93+
log.trace(msg, throwable)
8594
}
8695

8796
protected def logWarning(msg: => String, throwable: Throwable) {
88-
if (log.isWarnEnabled) log.warn(msg, throwable)
97+
log.warn(msg, throwable)
8998
}
9099

91100
protected def logError(msg: => String, throwable: Throwable) {
92-
if (log.isErrorEnabled) log.error(msg, throwable)
101+
log.error(msg, throwable)
93102
}
94103

95104
protected def isTraceEnabled(): Boolean = {
96-
log.isTraceEnabled
105+
log.underlying.isTraceEnabled
97106
}
98107

99108
private def initializeIfNecessary() {

core/src/main/scala/org/apache/spark/util/SignalLogger.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@
1818
package org.apache.spark.util
1919

2020
import org.apache.commons.lang3.SystemUtils
21-
import org.slf4j.Logger
21+
import com.typesafe.scalalogging.slf4j.Logger
2222
import sun.misc.{Signal, SignalHandler}
2323

2424
/**

mllib/pom.xml

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -54,6 +54,10 @@
5454
<artifactId>breeze_${scala.binary.version}</artifactId>
5555
<version>0.7</version>
5656
<exclusions>
57+
<exclusion>
58+
<groupId>com.typesafe</groupId>
59+
<artifactId>scalalogging-slf4j_${scala.binary.version}</artifactId>
60+
</exclusion>
5761
<!-- This is included as a compile-scoped dependency by jtransforms, which is
5862
a dependency of breeze. -->
5963
<exclusion>

pom.xml

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -279,6 +279,11 @@
279279
<artifactId>slf4j-log4j12</artifactId>
280280
<version>${slf4j.version}</version>
281281
</dependency>
282+
<dependency>
283+
<groupId>com.typesafe.scala-logging</groupId>
284+
<artifactId>scala-logging-slf4j_${scala.binary.version}</artifactId>
285+
<version>2.1.2</version>
286+
</dependency>
282287
<dependency>
283288
<groupId>org.slf4j</groupId>
284289
<artifactId>jul-to-slf4j</artifactId>

project/MimaExcludes.scala

Lines changed: 37 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -103,14 +103,49 @@ object MimaExcludes {
103103
ProblemFilters.exclude[IncompatibleMethTypeProblem](
104104
"org.apache.spark.mllib.tree.impurity.Variance.calculate")
105105
) ++
106-
Seq ( // Package-private classes removed in SPARK-2341
106+
Seq( // Package-private classes removed in SPARK-2341
107107
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.mllib.util.BinaryLabelParser"),
108108
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.mllib.util.BinaryLabelParser$"),
109109
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.mllib.util.LabelParser"),
110110
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.mllib.util.LabelParser$"),
111111
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.mllib.util.MulticlassLabelParser"),
112112
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.mllib.util.MulticlassLabelParser$")
113-
)
113+
) ++
114+
Seq(ProblemFilters.exclude[IncompatibleResultTypeProblem]
115+
("org.apache.spark.bagel.Bagel.log"),
116+
ProblemFilters.exclude[IncompatibleResultTypeProblem]
117+
("org.apache.spark.streaming.StreamingContext.log"),
118+
ProblemFilters.exclude[IncompatibleResultTypeProblem]
119+
("org.apache.spark.streaming.dstream.DStream.log") ,
120+
ProblemFilters.exclude[IncompatibleResultTypeProblem]
121+
("org.apache.spark.mllib.recommendation.ALS.log"),
122+
ProblemFilters.exclude[IncompatibleResultTypeProblem]
123+
("org.apache.spark.mllib.clustering.KMeans.log"),
124+
ProblemFilters.exclude[IncompatibleResultTypeProblem]
125+
("org.apache.spark.mllib.classification.NaiveBayes.log"),
126+
ProblemFilters.exclude[IncompatibleResultTypeProblem]
127+
("org.apache.spark.streaming.kafka.KafkaReceiver.log"),
128+
ProblemFilters.exclude[IncompatibleResultTypeProblem]
129+
("org.apache.spark.SparkContext.log"),
130+
ProblemFilters.exclude[IncompatibleResultTypeProblem]
131+
("org.apache.spark.rdd.PairRDDFunctions.log"),
132+
ProblemFilters.exclude[IncompatibleResultTypeProblem]
133+
("org.apache.spark.rdd.OrderedRDDFunctions.log"),
134+
ProblemFilters.exclude[IncompatibleResultTypeProblem]
135+
("org.apache.spark.rdd.SequenceFileRDDFunctions.log"),
136+
ProblemFilters.exclude[IncompatibleResultTypeProblem]
137+
("org.apache.spark.rdd.DoubleRDDFunctions.log"),
138+
ProblemFilters.exclude[IncompatibleResultTypeProblem]
139+
("org.apache.spark.streaming.twitter.TwitterReceiver.log"),
140+
ProblemFilters.exclude[IncompatibleResultTypeProblem]
141+
("org.apache.spark.streaming.zeromq.ZeroMQReceiver.log"),
142+
ProblemFilters.exclude[IncompatibleResultTypeProblem]
143+
("org.apache.spark.streaming.flume.FlumeReceiver.log") ,
144+
ProblemFilters.exclude[IncompatibleResultTypeProblem]
145+
("org.apache.spark.rdd.RDD.log"),
146+
ProblemFilters.exclude[IncompatibleResultTypeProblem]
147+
("org.apache.spark.SparkConf.log")
148+
)
114149
case v if v.startsWith("1.0") =>
115150
Seq(
116151
MimaBuild.excludeSparkPackage("api.java"),

sql/catalyst/pom.xml

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -54,11 +54,6 @@
5454
<artifactId>spark-core_${scala.binary.version}</artifactId>
5555
<version>${project.version}</version>
5656
</dependency>
57-
<dependency>
58-
<groupId>com.typesafe</groupId>
59-
<artifactId>scalalogging-slf4j_${scala.binary.version}</artifactId>
60-
<version>1.0.1</version>
61-
</dependency>
6257
<dependency>
6358
<groupId>org.scalatest</groupId>
6459
<artifactId>scalatest_${scala.binary.version}</artifactId>

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -109,12 +109,12 @@ class Analyzer(catalog: Catalog, registry: FunctionRegistry, caseSensitive: Bool
109109
object ResolveReferences extends Rule[LogicalPlan] {
110110
def apply(plan: LogicalPlan): LogicalPlan = plan transformUp {
111111
case q: LogicalPlan if q.childrenResolved =>
112-
logger.trace(s"Attempting to resolve ${q.simpleString}")
112+
log.trace(s"Attempting to resolve ${q.simpleString}")
113113
q transformExpressions {
114114
case u @ UnresolvedAttribute(name) =>
115115
// Leave unchanged if resolution fails. Hopefully will be resolved next round.
116116
val result = q.resolve(name).getOrElse(u)
117-
logger.debug(s"Resolving $u to $result")
117+
log.debug(s"Resolving $u to $result")
118118
result
119119
}
120120
}

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -75,7 +75,7 @@ trait HiveTypeCoercion {
7575
// Leave the same if the dataTypes match.
7676
case Some(newType) if a.dataType == newType.dataType => a
7777
case Some(newType) =>
78-
logger.debug(s"Promoting $a to $newType in ${q.simpleString}}")
78+
log.debug(s"Promoting $a to $newType in ${q.simpleString}}")
7979
newType
8080
}
8181
}
@@ -154,7 +154,7 @@ trait HiveTypeCoercion {
154154
(Alias(Cast(l, StringType), l.name)(), r)
155155

156156
case (l, r) if l.dataType != r.dataType =>
157-
logger.debug(s"Resolving mismatched union input ${l.dataType}, ${r.dataType}")
157+
log.debug(s"Resolving mismatched union input ${l.dataType}, ${r.dataType}")
158158
findTightestCommonType(l.dataType, r.dataType).map { widestType =>
159159
val newLeft =
160160
if (l.dataType == widestType) l else Alias(Cast(l, widestType), l.name)()
@@ -170,15 +170,15 @@ trait HiveTypeCoercion {
170170

171171
val newLeft =
172172
if (castedLeft.map(_.dataType) != left.output.map(_.dataType)) {
173-
logger.debug(s"Widening numeric types in union $castedLeft ${left.output}")
173+
log.debug(s"Widening numeric types in union $castedLeft ${left.output}")
174174
Project(castedLeft, left)
175175
} else {
176176
left
177177
}
178178

179179
val newRight =
180180
if (castedRight.map(_.dataType) != right.output.map(_.dataType)) {
181-
logger.debug(s"Widening numeric types in union $castedRight ${right.output}")
181+
log.debug(s"Widening numeric types in union $castedRight ${right.output}")
182182
Project(castedRight, right)
183183
} else {
184184
right

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/BoundAttribute.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@
1717

1818
package org.apache.spark.sql.catalyst.expressions
1919

20-
import org.apache.spark.sql.catalyst.Logging
20+
import org.apache.spark.Logging
2121
import org.apache.spark.sql.catalyst.errors.attachTree
2222
import org.apache.spark.sql.catalyst.types._
2323
import org.apache.spark.sql.catalyst.trees

0 commit comments

Comments
 (0)