diff --git a/graph/src/main/scala/spark/graph/Pregel.scala b/graph/src/main/scala/spark/graph/Pregel.scala
index 1011e3bf12..4bd8810634 100644
--- a/graph/src/main/scala/spark/graph/Pregel.scala
+++ b/graph/src/main/scala/spark/graph/Pregel.scala
@@ -6,35 +6,32 @@ import spark.RDD
 
 object Pregel {
 
-  def iterate[VD: ClassManifest, ED: ClassManifest, A: ClassManifest](
-    rawGraph: Graph[VD, ED])(
+  def iterate[VD: ClassManifest, ED: ClassManifest, A: ClassManifest](graph: Graph[VD, ED])(
     vprog: ( Vertex[VD], A) => VD,
     sendMsg: (Vid, EdgeWithVertices[VD, ED]) => Option[A],
     mergeMsg: (A, A) => A,
     initialMsg: A,
     numIter: Int) : Graph[VD, ED] = {
 
-    var graph = rawGraph.cache
+    var g = graph.cache
     var i = 0
 
     def reverseGather(vid: Vid, edge: EdgeWithVertices[VD,ED]) =
       sendMsg(edge.otherVertex(vid).id, edge)
 
-    var msgs: RDD[(Vid, A)] = graph.vertices.map{ v => (v.id, initialMsg) }
+    var msgs: RDD[(Vid, A)] = g.vertices.map{ v => (v.id, initialMsg) }
 
     while (i < numIter) {
 
-      def runProg(v: Vertex[VD], msg: Option[A]): VD =
-        if(msg.isEmpty) v.data else vprog(v, msg.get)
+      def runProg(v: Vertex[VD], msg: Option[A]): VD = if(msg.isEmpty) v.data else vprog(v, msg.get)
 
-      graph = graph.updateVertices(msgs, runProg).cache()
+      g = g.updateVertices(msgs, runProg).cache()
 
-      msgs = graph.flatMapReduceNeighborhood(reverseGather, mergeMsg, EdgeDirection.In)
+      msgs = g.flatMapReduceNeighborhood(reverseGather, mergeMsg, EdgeDirection.In)
 
       i += 1
     }
 
-    graph
-
+    g
   }
 
diff --git a/graph/src/test/scala/spark/graph/GraphSuite.scala b/graph/src/test/scala/spark/graph/GraphSuite.scala
index 11b3dd1298..64a7aa063b 100644
--- a/graph/src/test/scala/spark/graph/GraphSuite.scala
+++ b/graph/src/test/scala/spark/graph/GraphSuite.scala
@@ -37,5 +37,10 @@ class GraphSuite extends FunSuite with LocalSparkContext {
       (v, u: Option[String]) => if (u.isDefined) v.data + u.get else v.data)
     assert(g.numVertexPartitions === 5)
     assert(g.numEdgePartitions === 8)
+
+    g = g.reverse
+    assert(g.numVertexPartitions === 5)
+    assert(g.numEdgePartitions === 8)
+
   }
 }