
Commit 0ac574c

Merge pull request #1 from ankurdave/label-propagation
LabelPropagation: Fix compile errors and style; rename; add test
2 parents 9830342 + 0e24303 commit 0ac574c

2 files changed: +67, -21 lines

graphx/src/main/scala/org/apache/spark/graphx/lib/LPA.scala renamed to graphx/src/main/scala/org/apache/spark/graphx/lib/LabelPropagation.scala

Lines changed: 22 additions & 21 deletions
@@ -20,43 +20,44 @@ package org.apache.spark.graphx.lib
 import scala.reflect.ClassTag
 import org.apache.spark.graphx._
 
-/** LPA algorithm. */
-object LPA {
+/** Label Propagation algorithm. */
+object LabelPropagation {
   /**
-   * Run LPA (label propogation algorithm) for detecting communities in networks using the pregel framework.
-   *
-   * Each node in the network is initially assigned to its own community. At every super step
-   * nodes send their community affiliation to all neighbors and update their state to the mode
-   * community affiliation of incomming messages.
+   * Run static Label Propagation for detecting communities in networks.
    *
-   * LPA is a standard community detection algorithm for graphs. It is very inexpensive
+   * Each node in the network is initially assigned to its own community. At every superstep, nodes
+   * send their community affiliation to all neighbors and update their state to the mode community
+   * affiliation of incoming messages.
+   *
+   * LPA is a standard community detection algorithm for graphs. It is very inexpensive
    * computationally, although (1) convergence is not guaranteed and (2) one can end up with
    * trivial solutions (all nodes are identified into a single community).
    *
-   * @tparam VD the vertex attribute type (discarded in the computation)
   * @tparam ED the edge attribute type (not used in the computation)
   *
   * @param graph the graph for which to compute the community affiliation
-   * @param maxSteps the number of supersteps of LPA to be performed
+   * @param maxSteps the number of supersteps of LPA to be performed. Because this is a static
+   * implementation, the algorithm will run for exactly this many supersteps.
   *
   * @return a graph with vertex attributes containing the label of community affiliation
   */
-  def run[VD: ClassTag, ED: ClassTag](graph: Graph[VD, ED], maxSteps: Int): Graph[VertexId, Long]{
+  def run[ED: ClassTag](graph: Graph[_, ED], maxSteps: Int): Graph[VertexId, ED] = {
     val lpaGraph = graph.mapVertices { case (vid, _) => vid }
-    def sendMessage(edge: EdgeTriplet[VertexId, ED]) = {
-      Iterator((e.srcId, Map(e.dstAttr -> 1L)),(e.dstId, Map(e.srcAttr -> 1L)))
+    def sendMessage(e: EdgeTriplet[VertexId, ED]) = {
+      Iterator((e.srcId, Map(e.dstAttr -> 1L)), (e.dstId, Map(e.srcAttr -> 1L)))
     }
-    def mergeMessage(count1: Map[VertexId, Long], count2: Map[VertexId, Long]): Map[VertexId, Long] = {
+    def mergeMessage(count1: Map[VertexId, Long], count2: Map[VertexId, Long])
+      : Map[VertexId, Long] = {
      (count1.keySet ++ count2.keySet).map { i =>
-        val count1Val = count1.getOrElse(i,0L)
-        val count2Val = count2.getOrElse(i,0L)
-        i -> (count1Val +count2Val)
-    }.toMap
+        val count1Val = count1.getOrElse(i, 0L)
+        val count2Val = count2.getOrElse(i, 0L)
+        i -> (count1Val + count2Val)
+      }.toMap
     }
-    def vertexProgram(vid: VertexId, attr: Long, message: Map[VertexId, Long])={
-      if (message.isEmpty) attr else message.maxBy{_._2}._1),
+    def vertexProgram(vid: VertexId, attr: Long, message: Map[VertexId, Long]) = {
+      if (message.isEmpty) attr else message.maxBy(_._2)._1
     }
-    val initialMessage = Map[VertexId,Long]()
+    val initialMessage = Map[VertexId, Long]()
     Pregel(lpaGraph, initialMessage, maxIterations = maxSteps)(
      vprog = vertexProgram,
      sendMsg = sendMessage,
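
A minimal usage sketch of the renamed entry point, not part of this commit: it assumes a spark-shell session where a SparkContext named sc is already available, and the toy edge list and the choice of 10 supersteps are purely illustrative.

    import org.apache.spark.graphx.{Edge, Graph}
    import org.apache.spark.graphx.lib.LabelPropagation

    // A small toy graph: two short chains joined by a single bridging edge.
    val edges = sc.parallelize(Seq(
      Edge(1L, 2L, 1), Edge(2L, 3L, 1),
      Edge(4L, 5L, 1), Edge(5L, 6L, 1),
      Edge(3L, 4L, 1)))
    val graph = Graph.fromEdges(edges, 1)  // vertex attributes are overwritten by run()

    // After this commit, callers no longer supply a vertex attribute type parameter,
    // and the result keeps the original edge attribute type: Graph[VertexId, Int] here.
    val communities = LabelPropagation.run(graph, maxSteps = 10)
    communities.vertices.collect.foreach { case (id, label) => println(s"$id -> $label") }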
graphx/src/test/scala/org/apache/spark/graphx/lib/LabelPropagationSuite.scala (new file)

Lines changed: 45 additions & 0 deletions
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.graphx.lib
+
+import org.scalatest.FunSuite
+
+import org.apache.spark.graphx._
+
+class LabelPropagationSuite extends FunSuite with LocalSparkContext {
+  test("Label Propagation") {
+    withSpark { sc =>
+      // Construct a graph with two cliques connected by a single edge
+      val n = 5
+      val clique1 = for (u <- 0L until n; v <- 0L until n) yield Edge(u, v, 1)
+      val clique2 = for (u <- 0L until n; v <- 0L until n) yield Edge(u + n, v + n, 1)
+      val twoCliques = sc.parallelize(clique1 ++ clique2 :+ Edge(0L, n, 1))
+      val graph = Graph.fromEdges(twoCliques, 1)
+      // Run label propagation
+      val labels = LabelPropagation.run(graph, n * 4).cache()
+
+      // All vertices within a clique should have the same label
+      val clique1Labels = labels.vertices.filter(_._1 < n).map(_._2).collect.toArray
+      assert(clique1Labels.forall(_ == clique1Labels(0)))
+      val clique2Labels = labels.vertices.filter(_._1 >= n).map(_._2).collect.toArray
+      assert(clique2Labels.forall(_ == clique2Labels(0)))
+      // The two cliques should have different labels
+      assert(clique1Labels(0) != clique2Labels(0))
+    }
+  }
+}
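
The compile errors this commit fixes were in mergeMessage and vertexProgram. The following standalone sketch, not taken from the commit and using made-up label counts, shows the merge-and-mode step those functions implement: per-label counts from neighbors are summed, then the most frequent label is adopted via the maxBy(_._2)._1 call that the fix makes compile.

    object LabelCountSketch {
      type VertexId = Long

      // Same shape as mergeMessage in LabelPropagation.scala: sum counts per label.
      def mergeMessage(count1: Map[VertexId, Long], count2: Map[VertexId, Long]): Map[VertexId, Long] =
        (count1.keySet ++ count2.keySet).map { i =>
          i -> (count1.getOrElse(i, 0L) + count2.getOrElse(i, 0L))
        }.toMap

      def main(args: Array[String]): Unit = {
        val fromNeighborA = Map(1L -> 2L, 3L -> 1L)  // label 1 seen twice, label 3 once
        val fromNeighborB = Map(3L -> 2L)            // label 3 seen twice
        val merged = mergeMessage(fromNeighborA, fromNeighborB)
        println(merged)                  // Map(1 -> 2, 3 -> 3)
        println(merged.maxBy(_._2)._1)   // 3: the mode label this vertex would adopt
      }
    }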
