@@ -0,0 +1,92 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.mllib.evaluation

import org.apache.spark.annotation.Experimental
import org.apache.spark.rdd.RDD
import org.apache.spark.Logging
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.stat.MultivariateOnlineSummarizer

/**
* :: Experimental ::
* Evaluator for regression.
*
* @param predictionAndObservations an RDD of (prediction,observation) pairs.
*/
@Experimental
class RegressionMetrics(predictionAndObservations: RDD[(Double, Double)]) extends Logging {
Contributor: space after ,


/**
* Use MultivariateOnlineSummarizer to calculate mean and variance of different combination.
* MultivariateOnlineSummarizer is a numerically stable algorithm to compute mean and variance
* in a online fashion.
Contributor: The second sentence is not necessary; it documents MultivariateOnlineSummarizer itself rather than this summarizer. The first sentence can be changed to: "Use MultivariateOnlineSummarizer to calculate summary statistics of observations and errors."

*/
private lazy val summarizer: MultivariateOnlineSummarizer = {
Contributor: Minor: I would recommend renaming summarizer to summary: MultivariateStatisticalSummary, because it is static once computed.

val summarizer: MultivariateOnlineSummarizer = predictionAndObservations.map{
Contributor: space before {

case (prediction,observation) => Vectors.dense(
Contributor: space after ,

Array(observation, observation - prediction)
Contributor: Vectors.dense(observation, observation - prediction)

)
}.aggregate(new MultivariateOnlineSummarizer())(
(summary, v) => summary.add(v),
(sum1,sum2) => sum1.merge(sum2)
Contributor: space after ,

)
summarizer
}
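
The aggregation above packs each record as a two-element vector, so index 0 of the summarizer tracks the observations and index 1 tracks the residuals observation - prediction. A minimal local sketch of the same add/merge pattern, without an RDD, using the same MultivariateOnlineSummarizer API as the code above (assuming spark-mllib is on the classpath):

import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.stat.MultivariateOnlineSummarizer

// Two "partitions", each with one (observation, observation - prediction) sample.
val s1 = new MultivariateOnlineSummarizer().add(Vectors.dense(3.0, 0.5))
val s2 = new MultivariateOnlineSummarizer().add(Vectors.dense(-0.5, -0.5))
// merge plays the same role as the combiner in the aggregate call above.
val merged = s1.merge(s2)
merged.variance(0)  // sample variance of the observations
merged.normL2(1)    // sqrt of the sum of squared residuals
merged.count        // number of samples seen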

/**
* Returns the explained variance regression score.
* explainedVarianceScore = 1 - variance(y - \hat{y}) / variance(y)
* Reference: [[http://en.wikipedia.org/wiki/Explained_variation]]
*/
def explainedVarianceScore: Double = {
Contributor: Why do we need Score here? explainedVariance should be sufficient.

1 - summarizer.variance(1) / summarizer.variance(0)
}

/**
* Returns the mean absolute error, which is a risk function corresponding to the
* expected value of the absolute error loss or l1-norm loss.
*/
def meanAbsoluteError: Double = {
summarizer.normL1(1) / summarizer.count
}

/**
* Returns the mean squared error, which is a risk function corresponding to the
* expected value of the squared error loss or quadratic loss.
*/
def meanSquaredError: Double = {
math.pow(summarizer.normL2(1),2) / summarizer.count
Contributor: safer to do the following (though summarizer.normL2(1) is not optimized):

val rmse = summarizer.normL2(1) / math.sqrt(summarizer.count)
rmse * rmse

}
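
To illustrate the reviewer's concern with made-up magnitudes: math.pow(summarizer.normL2(1), 2) can overflow to Double.PositiveInfinity even when the mean squared error itself fits in a Double, while dividing by math.sqrt(count) before squaring stays finite:

// Hypothetical magnitudes chosen only to show the overflow, not taken from real data.
val normL2 = 3.2e155   // sqrt of the sum of squared errors
val count = 1e6        // number of samples
val mseViaPow = math.pow(normL2, 2) / count   // Infinity: normL2 squared exceeds Double.MaxValue
val rmse = normL2 / math.sqrt(count)          // ~3.2e152
val mseViaRmse = rmse * rmse                  // ~1.02e305, a finite Double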

/**
* Returns the root mean squared error, which is defined as the square root of
* the mean squared error.
*/
def rootMeanSquaredError: Double = {
summarizer.normL2(1) / math.sqrt(summarizer.count)
}

/**
* Returns R^2^, the coefficient of determination.
* Reference: [[http://en.wikipedia.org/wiki/Coefficient_of_determination]]
*/
def r2Score: Double = {
Contributor: ditto. Why do we need Score? r2 should be sufficient.

1 - math.pow(summarizer.normL2(1),2) / (summarizer.variance(0) * (summarizer.count - 1))
Contributor: space after ,

}
}
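
For reference on the r2Score expression: summarizer.variance(0) is the unbiased sample variance of y, so variance(0) * (count - 1) recovers the total sum of squares, and the method computes the usual coefficient of determination:

R^2 = 1 - \frac{\sum_i (y_i - \hat{y}_i)^2}{\sum_i (y_i - \bar{y})^2}
    = 1 - \frac{\mathrm{normL2}(1)^2}{\mathrm{variance}(0) \cdot (\mathrm{count} - 1)}
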
@@ -0,0 +1,52 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.mllib.evaluation

import org.scalatest.FunSuite

import org.apache.spark.mllib.util.LocalSparkContext
import org.apache.spark.mllib.util.TestingUtils._

class RegressionMetricsSuite extends FunSuite with LocalSparkContext {

test("regression metrics") {
val predictionAndObservations = sc.parallelize(
Seq((2.5,3.0),(0.0,-0.5),(2.0,2.0),(8.0,7.0)),2)
Contributor: space after ,

val metrics = new RegressionMetrics(predictionAndObservations)
assert(metrics.explainedVarianceScore ~== 0.95717 absTol 1E-5,
"explained variance regression score mismatch")
assert(metrics.meanAbsoluteError ~== 0.5 absTol 1E-5, "mean absolute error mismatch")
assert(metrics.meanSquaredError ~== 0.375 absTol 1E-5, "mean squared error mismatch")
assert(metrics.rootMeanSquaredError ~== 0.61237 absTol 1E-5,
"root mean squared error mismatch")
assert(metrics.r2Score ~== 0.94861 absTol 1E-5, "r2 score mismatch")
}
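
For readers checking these constants: the residuals observation - prediction are 0.5, -0.5, 0.0, -1.0, and the expected values follow directly. A plain Scala hand check (no SparkContext needed), assuming nothing beyond the standard library:

val observations = Seq(3.0, -0.5, 2.0, 7.0)
val residuals = Seq(0.5, -0.5, 0.0, -1.0)        // observation - prediction
val n = residuals.size
def sampleVariance(xs: Seq[Double]): Double = {
  val mean = xs.sum / xs.size
  xs.map(x => (x - mean) * (x - mean)).sum / (xs.size - 1)
}
val mae = residuals.map(math.abs).sum / n        // (0.5 + 0.5 + 0.0 + 1.0) / 4 = 0.5
val mse = residuals.map(r => r * r).sum / n      // 1.5 / 4 = 0.375
val rmse = math.sqrt(mse)                        // 0.61237...
val explainedVariance = 1.0 - sampleVariance(residuals) / sampleVariance(observations)   // 0.95717...
val r2 = 1.0 - residuals.map(r => r * r).sum / (sampleVariance(observations) * (n - 1))  // 0.94861...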

test("regression metrics with complete fitting") {
val predictionAndObservations = sc.parallelize(
Seq((3.0,3.0),(0.0,0.0),(2.0,2.0),(8.0,8.0)),2)
Contributor: space after ,

val metrics = new RegressionMetrics(predictionAndObservations)
assert(metrics.explainedVarianceScore ~== 1.0 absTol 1E-5,
"explained variance regression score mismatch")
assert(metrics.meanAbsoluteError ~== 0.0 absTol 1E-5, "mean absolute error mismatch")
assert(metrics.meanSquaredError ~== 0.0 absTol 1E-5, "mean squared error mismatch")
assert(metrics.rootMeanSquaredError ~== 0.0 absTol 1E-5,
"root mean squared error mismatch")
assert(metrics.r2Score ~== 1.0 absTol 1E-5, "r2 score mismatch")
}
}