diff --git a/graph/src/main/scala/org/apache/spark/graph/impl/EdgePartition.scala b/graph/src/main/scala/org/apache/spark/graph/impl/EdgePartition.scala
index 3430ffdfc4..e97522feae 100644
--- a/graph/src/main/scala/org/apache/spark/graph/impl/EdgePartition.scala
+++ b/graph/src/main/scala/org/apache/spark/graph/impl/EdgePartition.scala
@@ -56,16 +56,6 @@ class EdgePartition[@specialized(Char, Int, Boolean, Byte, Long, Float, Double)
     new EdgePartition(srcIds, dstIds, newData, index)
   }
 
-  def filter(pred: Edge[ED] => Boolean): EdgePartition[ED] = {
-    val builder = new EdgePartitionBuilder[ED]
-    iterator.foreach { e =>
-      if (pred(e)) {
-        builder.add(e.srcId, e.dstId, e.attr)
-      }
-    }
-    builder.toEdgePartition
-  }
-
   /**
    * Apply the function f to all edges in this partition.
    *
diff --git a/graph/src/test/scala/org/apache/spark/graph/impl/EdgePartitionSuite.scala b/graph/src/test/scala/org/apache/spark/graph/impl/EdgePartitionSuite.scala
index 2991533e89..a52a5653e2 100644
--- a/graph/src/test/scala/org/apache/spark/graph/impl/EdgePartitionSuite.scala
+++ b/graph/src/test/scala/org/apache/spark/graph/impl/EdgePartitionSuite.scala
@@ -31,17 +31,6 @@ class EdgePartitionSuite extends FunSuite {
       edges.map(e => e.copy(attr = e.srcId + e.dstId)))
   }
 
-  test("filter") {
-    val edges = List(Edge(0, 1, 0), Edge(1, 2, 0), Edge(2, 0, 0))
-    val builder = new EdgePartitionBuilder[Int]
-    for (e <- edges) {
-      builder.add(e.srcId, e.dstId, e.attr)
-    }
-    val edgePartition = builder.toEdgePartition
-    assert(edgePartition.filter(e => e.srcId <= 1).iterator.map(_.copy()).toList ===
-      edges.filter(e => e.srcId <= 1))
-  }
-
   test("groupEdges") {
     val edges = List(
       Edge(0, 1, 1), Edge(1, 2, 2), Edge(2, 0, 4), Edge(0, 1, 8), Edge(1, 2, 16), Edge(2, 0, 32))
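
For reference, any caller that still needs per-partition edge filtering after this removal can reproduce the deleted behavior externally. Below is a minimal sketch, not part of the patch: the helper object `EdgePartitionOps` and its `filterPartition` method are hypothetical names, and it assumes `EdgePartition.iterator` and `EdgePartitionBuilder` remain accessible to callers (as they are in the code deleted above) and that `EdgePartitionBuilder[ED]` needs a `ClassManifest` for `ED`, matching the specialized type parameter on `EdgePartition`.

// Hypothetical helper, assuming public access to iterator and EdgePartitionBuilder.
import org.apache.spark.graph.Edge
import org.apache.spark.graph.impl.{EdgePartition, EdgePartitionBuilder}

object EdgePartitionOps {
  def filterPartition[ED: ClassManifest](part: EdgePartition[ED])
      (pred: Edge[ED] => Boolean): EdgePartition[ED] = {
    val builder = new EdgePartitionBuilder[ED]
    // Copy only the edges satisfying the predicate into a fresh builder,
    // then compact them into a new, re-indexed partition -- the same logic
    // as the removed EdgePartition.filter.
    part.iterator.foreach { e =>
      if (pred(e)) {
        builder.add(e.srcId, e.dstId, e.attr)
      }
    }
    builder.toEdgePartition
  }
}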