Commit 19be223

lazy val on other aggregators

1 parent 961fbaa · commit 19be223

File tree: 5 files changed, +7 -6 lines

mllib/src/main/scala/org/apache/spark/ml/classification/LinearSVC.scala

Lines changed: 1 addition & 0 deletions

@@ -458,6 +458,7 @@ private class LinearSVCAggregator(
    */
   def add(instance: Instance): this.type = {
     instance match { case Instance(label, weight, features) =>
+      require(weight >= 0.0, s"instance weight, $weight has to be >= 0.0")
       require(numFeatures == features.size, s"Dimensions mismatch when adding new instance." +
         s" Expecting $numFeatures but got ${features.size}.")
       if (weight == 0.0) return this

mllib/src/main/scala/org/apache/spark/ml/classification/LogisticRegression.scala

Lines changed: 1 addition & 1 deletion

@@ -1436,7 +1436,7 @@ private class LogisticAggregator(
     case _ => throw new IllegalArgumentException(s"coefficients only supports dense vector but " +
       s"got type ${bcCoefficients.value.getClass}.)")
   }
-  private val gradientSumArray = new Array[Double](coefficientSize)
+  private lazy val gradientSumArray = new Array[Double](coefficientSize)

   if (multinomial && numClasses <= 2) {
     logInfo(s"Multinomial logistic regression for binary classification yields separate " +
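This is the pattern the commit applies across the aggregators touched here: a buffer that used to be allocated eagerly at construction time becomes a lazy val, so it is only materialized on first access. A plausible motivation, inferred from how these aggregators are used rather than stated in the commit message, is that the empty aggregator built on the driver and shipped to executors (for example as the zero value of a treeAggregate) no longer carries a fully allocated, all-zero array. A minimal, self-contained sketch of the idea; the class and members below are illustrative, not Spark's LogisticAggregator:

// Illustrative sketch only -- a toy aggregator, not Spark code.
class SketchAggregator(numFeatures: Int) extends Serializable {
  private var weightSum = 0.0

  // Deferred allocation: the array is materialized the first time add() touches it,
  // typically on an executor, not when the instance is constructed on the driver.
  private lazy val gradientSumArray: Array[Double] = new Array[Double](numFeatures)

  def add(features: Array[Double], weight: Double = 1.0): this.type = {
    require(weight >= 0.0, s"instance weight, $weight has to be >= 0.0")
    if (weight == 0.0) return this
    var i = 0
    while (i < numFeatures) {
      // Stand-in for the real per-instance gradient update.
      gradientSumArray(i) += weight * features(i)
      i += 1
    }
    weightSum += weight
    this
  }

  def gradient: Array[Double] = {
    require(weightSum > 0, "No instances have been added.")
    gradientSumArray.map(_ / weightSum)
  }
}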

mllib/src/main/scala/org/apache/spark/ml/clustering/GaussianMixture.scala

Lines changed: 3 additions & 3 deletions

@@ -580,10 +580,10 @@ private class ExpectationAggregator(
   private val k: Int = bcWeights.value.length
   private var totalCnt: Long = 0L
   private var newLogLikelihood: Double = 0.0
-  private val newWeights: Array[Double] = new Array[Double](k)
-  private val newMeans: Array[DenseVector] = Array.fill(k)(
+  private lazy val newWeights: Array[Double] = new Array[Double](k)
+  private lazy val newMeans: Array[DenseVector] = Array.fill(k)(
     new DenseVector(Array.fill[Double](numFeatures)(0.0)))
-  private val newCovs: Array[DenseVector] = Array.fill(k)(
+  private lazy val newCovs: Array[DenseVector] = Array.fill(k)(
     new DenseVector(Array.fill[Double](numFeatures * (numFeatures + 1) / 2)(0.0)))

   @transient private lazy val oldGaussians = {

mllib/src/main/scala/org/apache/spark/ml/regression/AFTSurvivalRegression.scala

Lines changed: 1 addition & 1 deletion

@@ -526,7 +526,7 @@ private class AFTAggregator(
   private var totalCnt: Long = 0L
   private var lossSum = 0.0
   // Here we optimize loss function over log(sigma), intercept and coefficients
-  private val gradientSumArray = Array.ofDim[Double](length)
+  private lazy val gradientSumArray = Array.ofDim[Double](length)

   def count: Long = totalCnt
   def loss: Double = {

mllib/src/main/scala/org/apache/spark/ml/regression/LinearRegression.scala

Lines changed: 1 addition & 1 deletion

@@ -959,7 +959,7 @@ private class LeastSquaresAggregator(
   @transient private lazy val effectiveCoefficientsVector = effectiveCoefAndOffset._1
   @transient private lazy val offset = effectiveCoefAndOffset._2

-  private val gradientSumArray = Array.ofDim[Double](dim)
+  private lazy val gradientSumArray = Array.ofDim[Double](dim)

   /**
    * Add a new training instance to this LeastSquaresAggregator, and update the loss and gradient