9 files changed: +83 -8 lines
  java/org/apache/spark/examples
  scala/org/apache/spark/examples

JavaHdfsLR.java
 /**
  * Logistic regression based classification.
+ *
+ * This is an example implementation for learning how to use Spark. For more conventional use,
+ * please refer to either org.apache.spark.mllib.classification.LogisticRegressionWithSGD or
+ * org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS based on your needs.
  */
 public final class JavaHdfsLR {
 
   private static final int D = 10;  // Number of dimensions
   private static final Random rand = new Random(42);
 
+  static void showWarning() {
+    String warning = "WARN: This is a naive implementation of Logistic Regression " +
+      "and is given as an example!\n" +
+      "Please use either org.apache.spark.mllib.classification.LogisticRegressionWithSGD " +
+      "or org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS " +
+      "for more conventional use.";
+    System.err.println(warning);
+  }
+
   static class DataPoint implements Serializable {
     DataPoint(double[] x, double y) {
       this.x = x;
@@ -109,6 +122,8 @@ public static void main(String[] args) {
       System.exit(1);
     }
 
+    showWarning();
+
     SparkConf sparkConf = new SparkConf().setAppName("JavaHdfsLR");
     JavaSparkContext sc = new JavaSparkContext(sparkConf);
     JavaRDD<String> lines = sc.textFile(args[0]);
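For readers who want to follow the pointer in the warning added above, a minimal Scala sketch of the recommended MLlib route might look like the following. The object name, the LIBSVM input path, and the accuracy check are illustrative assumptions, not part of this patch; LogisticRegressionWithSGD.train(...) would be the SGD-based alternative.

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS
import org.apache.spark.mllib.util.MLUtils

// Sketch only: shows the conventional MLlib path the warning recommends.
object MLlibLogisticRegressionSketch {
  def main(args: Array[String]) {
    val sc = new SparkContext(new SparkConf().setAppName("MLlibLogisticRegressionSketch"))

    // Load an RDD[LabeledPoint]; a LIBSVM-formatted file is one convenient input (path is a placeholder).
    val training = MLUtils.loadLibSVMFile(sc, "data/mllib/sample_libsvm_data.txt")

    // Train with L-BFGS instead of hand-rolled gradient descent.
    val model = new LogisticRegressionWithLBFGS().run(training)

    // Rough sanity check: fraction of training points classified correctly.
    val correct = training.filter(p => model.predict(p.features) == p.label).count()
    println(s"Training accuracy: ${correct.toDouble / training.count()}")

    sc.stop()
  }
}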
JavaPageRank.java

  * URL neighbor URL
  * ...
  * where URL and their neighbors are separated by space(s).
+ *
+ * This is an example implementation for learning how to use Spark. For more conventional use,
+ * please refer to org.apache.spark.graphx.lib.PageRank
  */
 public final class JavaPageRank {
   private static final Pattern SPACES = Pattern.compile("\\s+");
 
+  static void showWarning() {
+    String warning = "WARN: This is a naive implementation of PageRank " +
+      "and is given as an example! \n" +
+      "Please use the PageRank implementation found in " +
+      "org.apache.spark.graphx.lib.PageRank for more conventional use.";
+    System.err.println(warning);
+  }
+
   private static class Sum implements Function2<Double, Double, Double> {
     @Override
     public Double call(Double a, Double b) {
@@ -62,6 +73,8 @@ public static void main(String[] args) throws Exception {
       System.exit(1);
     }
 
+    showWarning();
+
     SparkConf sparkConf = new SparkConf().setAppName("JavaPageRank");
     JavaSparkContext ctx = new JavaSparkContext(sparkConf);
Python PageRank example

 # limitations under the License.
 #
 
+"""
+This is an example implementation of PageRank. For more conventional use,
+Please refer to PageRank implementation provided by graphx
+"""
+
 import re
 import sys
 from operator import add
@@ -40,6 +45,9 @@ def parseNeighbors(urls):
         print >> sys.stderr, "Usage: pagerank <file> <iterations>"
         exit(-1)
 
+    print >> sys.stderr, """WARN: This is a naive implementation of PageRank and is
+          given as an example! Please refer to PageRank implementation provided by graphx"""
+
     # Initialize the spark context.
     sc = SparkContext(appName="PythonPageRank")
 
LocalFileLR.scala

@@ -25,7 +25,8 @@ import breeze.linalg.{Vector, DenseVector}
  * Logistic regression based classification.
  *
  * This is an example implementation for learning how to use Spark. For more conventional use,
- * please refer to org.apache.spark.mllib.classification.LogisticRegression
+ * please refer to either org.apache.spark.mllib.classification.LogisticRegressionWithSGD or
+ * org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS based on your needs.
  */
 object LocalFileLR {
   val D = 10   // Numer of dimensions
@@ -41,7 +42,8 @@ object LocalFileLR {
   def showWarning() {
     System.err.println(
       """WARN: This is a naive implementation of Logistic Regression and is given as an example!
-        |Please use the LogisticRegression method found in org.apache.spark.mllib.classification
+        |Please use either org.apache.spark.mllib.classification.LogisticRegressionWithSGD or
+        |org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS
        |for more conventional use.
       """.stripMargin)
   }
LocalLR.scala

@@ -25,7 +25,8 @@ import breeze.linalg.{Vector, DenseVector}
  * Logistic regression based classification.
  *
  * This is an example implementation for learning how to use Spark. For more conventional use,
- * please refer to org.apache.spark.mllib.classification.LogisticRegression
+ * please refer to either org.apache.spark.mllib.classification.LogisticRegressionWithSGD or
+ * org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS based on your needs.
  */
 object LocalLR {
   val N = 10000  // Number of data points
@@ -48,7 +49,8 @@ object LocalLR {
   def showWarning() {
     System.err.println(
       """WARN: This is a naive implementation of Logistic Regression and is given as an example!
-        |Please use the LogisticRegression method found in org.apache.spark.mllib.classification
+        |Please use either org.apache.spark.mllib.classification.LogisticRegressionWithSGD or
+        |org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS
        |for more conventional use.
       """.stripMargin)
   }
SparkHdfsLR.scala

@@ -32,7 +32,8 @@ import org.apache.spark.scheduler.InputFormatInfo
  * Logistic regression based classification.
  *
  * This is an example implementation for learning how to use Spark. For more conventional use,
- * please refer to org.apache.spark.mllib.classification.LogisticRegression
+ * please refer to either org.apache.spark.mllib.classification.LogisticRegressionWithSGD or
+ * org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS based on your needs.
  */
 object SparkHdfsLR {
   val D = 10   // Numer of dimensions
@@ -54,7 +55,8 @@ object SparkHdfsLR {
   def showWarning() {
     System.err.println(
       """WARN: This is a naive implementation of Logistic Regression and is given as an example!
-        |Please use the LogisticRegression method found in org.apache.spark.mllib.classification
+        |Please use either org.apache.spark.mllib.classification.LogisticRegressionWithSGD or
+        |org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS
        |for more conventional use.
       """.stripMargin)
   }
SparkLR.scala

@@ -30,7 +30,8 @@ import org.apache.spark._
  * Usage: SparkLR [slices]
  *
  * This is an example implementation for learning how to use Spark. For more conventional use,
- * please refer to org.apache.spark.mllib.classification.LogisticRegression
+ * please refer to either org.apache.spark.mllib.classification.LogisticRegressionWithSGD or
+ * org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS based on your needs.
  */
 object SparkLR {
   val N = 10000  // Number of data points
@@ -53,7 +54,8 @@ object SparkLR {
   def showWarning() {
     System.err.println(
       """WARN: This is a naive implementation of Logistic Regression and is given as an example!
-        |Please use the LogisticRegression method found in org.apache.spark.mllib.classification
+        |Please use either org.apache.spark.mllib.classification.LogisticRegressionWithSGD or
+        |org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS
        |for more conventional use.
       """.stripMargin)
   }
SparkPageRank.scala

@@ -28,13 +28,28 @@ import org.apache.spark.{SparkConf, SparkContext}
  * URL neighbor URL
  * ...
  * where URL and their neighbors are separated by space(s).
+ *
+ * This is an example implementation for learning how to use Spark. For more conventional use,
+ * please refer to org.apache.spark.graphx.lib.PageRank
  */
 object SparkPageRank {
+
+  def showWarning() {
+    System.err.println(
+      """WARN: This is a naive implementation of PageRank and is given as an example!
+        |Please use the PageRank implementation found in org.apache.spark.graphx.lib.PageRank
+        |for more conventional use.
+      """.stripMargin)
+  }
+
   def main(args: Array[String]) {
     if (args.length < 1) {
       System.err.println("Usage: SparkPageRank <file> <iter>")
       System.exit(1)
     }
+
+    showWarning()
+
     val sparkConf = new SparkConf().setAppName("PageRank")
     val iters = if (args.length > 0) args(1).toInt else 10
     val ctx = new SparkContext(sparkConf)
SparkTachyonHdfsLR.scala

@@ -32,11 +32,24 @@ import org.apache.spark.storage.StorageLevel
 /**
  * Logistic regression based classification.
  * This example uses Tachyon to persist rdds during computation.
+ *
+ * This is an example implementation for learning how to use Spark. For more conventional use,
+ * please refer to either org.apache.spark.mllib.classification.LogisticRegressionWithSGD or
+ * org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS based on your needs.
  */
 object SparkTachyonHdfsLR {
   val D = 10   // Numer of dimensions
   val rand = new Random(42)
 
+  def showWarning() {
+    System.err.println(
+      """WARN: This is a naive implementation of Logistic Regression and is given as an example!
+        |Please use either org.apache.spark.mllib.classification.LogisticRegressionWithSGD or
+        |org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS
+        |for more conventional use.
+      """.stripMargin)
+  }
+
   case class DataPoint(x: Vector[Double], y: Double)
 
   def parsePoint(line: String): DataPoint = {
@@ -51,6 +64,9 @@ object SparkTachyonHdfsLR {
   }
 
   def main(args: Array[String]) {
+
+    showWarning()
+
     val inputPath = args(0)
     val sparkConf = new SparkConf().setAppName("SparkTachyonHdfsLR")
     val conf = new Configuration()
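As a footnote on the Tachyon variant above, the off-heap persistence it relies on comes down to a single storage-level choice. A hedged sketch follows; the object name, input path, and the spark.tachyonStore.url value are assumptions for a local Tachyon deployment of that era, not something this patch configures.

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.storage.StorageLevel

// Sketch only: persisting an RDD off-heap so blocks land in Tachyon rather than on the executor heap.
object OffHeapPersistSketch {
  def main(args: Array[String]) {
    val sparkConf = new SparkConf()
      .setAppName("OffHeapPersistSketch")
      .set("spark.tachyonStore.url", "tachyon://localhost:19998")  // assumed local Tachyon master
    val sc = new SparkContext(sparkConf)

    // OFF_HEAP was the Tachyon-backed storage level in Spark 1.x.
    val lines = sc.textFile("hdfs:///path/to/input.txt").persist(StorageLevel.OFF_HEAP)
    println(s"Cached ${lines.count()} lines off-heap")

    sc.stop()
  }
}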