Skip to content

Commit 468af0f

Browse files
committed
Merge pull request apache#348 from prabeesh/master
spark -> org.apache.spark: changed the package name from spark to org.apache.spark, which was missing in some of the files
2 parents c3cf047 + a91f14c commit 468af0f

File tree

8 files changed

+12
-12
lines changed

8 files changed

+12
-12
lines changed

examples/src/main/scala/org/apache/spark/streaming/examples/ActorWordCount.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -134,9 +134,9 @@ object FeederActor {
134134
* <hostname> and <port> describe the AkkaSystem that Spark Sample feeder is running on.
135135
*
136136
* To run this example locally, you may run Feeder Actor as
137-
* `$ ./bin/run-example spark.streaming.examples.FeederActor 127.0.1.1 9999`
137+
* `$ ./bin/run-example org.apache.spark.streaming.examples.FeederActor 127.0.1.1 9999`
138138
* and then run the example
139-
* `$ ./bin/run-example spark.streaming.examples.ActorWordCount local[2] 127.0.1.1 9999`
139+
* `$ ./bin/run-example org.apache.spark.streaming.examples.ActorWordCount local[2] 127.0.1.1 9999`
140140
*/
141141
object ActorWordCount {
142142
def main(args: Array[String]) {

examples/src/main/scala/org/apache/spark/streaming/examples/HdfsWordCount.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@ import org.apache.spark.streaming.StreamingContext._
2828
* <directory> is the directory that Spark Streaming will use to find and read new text files.
2929
*
3030
* To run this on your local machine on directory `localdir`, run this example
31-
* `$ ./bin/run-example spark.streaming.examples.HdfsWordCount local[2] localdir`
31+
* `$ ./bin/run-example org.apache.spark.streaming.examples.HdfsWordCount local[2] localdir`
3232
* Then create a text file in `localdir` and the words in the file will get counted.
3333
*/
3434
object HdfsWordCount {

examples/src/main/scala/org/apache/spark/streaming/examples/KafkaWordCount.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@ import org.apache.spark.streaming.util.RawTextHelper._
3535
* <numThreads> is the number of threads the kafka consumer should use
3636
*
3737
* Example:
38-
* `./bin/run-example spark.streaming.examples.KafkaWordCount local[2] zoo01,zoo02,zoo03 my-consumer-group topic1,topic2 1`
38+
* `./bin/run-example org.apache.spark.streaming.examples.KafkaWordCount local[2] zoo01,zoo02,zoo03 my-consumer-group topic1,topic2 1`
3939
*/
4040
object KafkaWordCount {
4141
def main(args: Array[String]) {

examples/src/main/scala/org/apache/spark/streaming/examples/NetworkWordCount.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ import org.apache.spark.streaming.StreamingContext._
2929
* To run this on your local machine, you need to first run a Netcat server
3030
* `$ nc -lk 9999`
3131
* and then run the example
32-
* `$ ./bin/run-example spark.streaming.examples.NetworkWordCount local[2] localhost 9999`
32+
* `$ ./bin/run-example org.apache.spark.streaming.examples.NetworkWordCount local[2] localhost 9999`
3333
*/
3434
object NetworkWordCount {
3535
def main(args: Array[String]) {

examples/src/main/scala/org/apache/spark/streaming/examples/StatefulNetworkWordCount.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ import org.apache.spark.streaming.StreamingContext._
2929
* To run this on your local machine, you need to first run a Netcat server
3030
* `$ nc -lk 9999`
3131
* and then run the example
32-
* `$ ./bin/run-example spark.streaming.examples.StatefulNetworkWordCount local[2] localhost 9999`
32+
* `$ ./bin/run-example org.apache.spark.streaming.examples.StatefulNetworkWordCount local[2] localhost 9999`
3333
*/
3434
object StatefulNetworkWordCount {
3535
def main(args: Array[String]) {

examples/src/main/scala/org/apache/spark/streaming/examples/ZeroMQWordCount.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -62,9 +62,9 @@ object SimpleZeroMQPublisher {
6262
* <zeroMQurl> and <topic> describe where zeroMq publisher is running.
6363
*
6464
* To run this example locally, you may run publisher as
65-
* `$ ./bin/run-example spark.streaming.examples.SimpleZeroMQPublisher tcp://127.0.1.1:1234 foo.bar`
65+
* `$ ./bin/run-example org.apache.spark.streaming.examples.SimpleZeroMQPublisher tcp://127.0.1.1:1234 foo.bar`
6666
* and run the example as
67-
* `$ ./bin/run-example spark.streaming.examples.ZeroMQWordCount local[2] tcp://127.0.1.1:1234 foo`
67+
* `$ ./bin/run-example org.apache.spark.streaming.examples.ZeroMQWordCount local[2] tcp://127.0.1.1:1234 foo`
6868
*/
6969
object ZeroMQWordCount {
7070
def main(args: Array[String]) {

examples/src/main/scala/org/apache/spark/streaming/examples/clickstream/PageViewGenerator.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -39,8 +39,8 @@ object PageView extends Serializable {
3939
/** Generates streaming events to simulate page views on a website.
4040
*
4141
* This should be used in tandem with PageViewStream.scala. Example:
42-
* $ ./bin/run-example spark.streaming.examples.clickstream.PageViewGenerator 44444 10
43-
* $ ./bin/run-example spark.streaming.examples.clickstream.PageViewStream errorRatePerZipCode localhost 44444
42+
* $ ./bin/run-example org.apache.spark.streaming.examples.clickstream.PageViewGenerator 44444 10
43+
* $ ./bin/run-example org.apache.spark.streaming.examples.clickstream.PageViewStream errorRatePerZipCode localhost 44444
4444
*
4545
* When running this, you may want to set the root logging level to ERROR in
4646
* conf/log4j.properties to reduce the verbosity of the output.

examples/src/main/scala/org/apache/spark/streaming/examples/clickstream/PageViewStream.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -25,8 +25,8 @@ import org.apache.spark.SparkContext._
2525
* operators available in Spark streaming.
2626
*
2727
* This should be used in tandem with PageViewStream.scala. Example:
28-
* $ ./bin/run-example spark.streaming.examples.clickstream.PageViewGenerator 44444 10
29-
* $ ./bin/run-example spark.streaming.examples.clickstream.PageViewStream errorRatePerZipCode localhost 44444
28+
* $ ./bin/run-example org.apache.spark.streaming.examples.clickstream.PageViewGenerator 44444 10
29+
* $ ./bin/run-example org.apache.spark.streaming.examples.clickstream.PageViewStream errorRatePerZipCode localhost 44444
3030
*/
3131
object PageViewStream {
3232
def main(args: Array[String]) {

0 commit comments

Comments (0)