[SPARK-5038] Add explicit return type for implicit functions.

As we learned in #3580, not explicitly typing implicit functions can lead to compiler bugs and potentially unexpected runtime behavior.

This is a follow-up PR for the rest of Spark (outside Spark SQL). The original PR for Spark SQL can be found at https://github.com/apache/spark/pull/3859.
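
To make the concern concrete, here is a toy illustration of the convention this patch applies (the Meters/Conversions/Demo names are invented for the example, not Spark code): when the result type of an implicit def is left off, the compiler infers it from the body, so the conversion's contract can drift silently and, as #3580 showed, inference itself can misbehave; annotating the result type pins the contract down.

    class Meters(val value: Double)

    object Conversions {
      // Without an annotation the result type would be inferred from the body:
      //   implicit def doubleToMeters(d: Double) = new Meters(d)
      // With it, the conversion's signature is fixed and checked by the compiler.
      implicit def doubleToMeters(d: Double): Meters = new Meters(d)
    }

    object Demo extends App {
      import Conversions._
      val distance: Meters = 3.5   // applies doubleToMeters
      println(distance.value)      // prints 3.5
    }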

Author: Reynold Xin <rxin@databricks.com>

Closes #3860 from rxin/implicit and squashes the following commits:

73702f9 [Reynold Xin] [SPARK-5038] Add explicit return type for implicit functions.
Reynold Xin 2014-12-31 17:07:47 -08:00
parent 4bb12488d5
commit 7749dd6c36
6 changed files with 65 additions and 64 deletions

@@ -1708,19 +1708,19 @@ object SparkContext extends Logging {
   // Implicit conversions to common Writable types, for saveAsSequenceFile
-  implicit def intToIntWritable(i: Int) = new IntWritable(i)
+  implicit def intToIntWritable(i: Int): IntWritable = new IntWritable(i)
-  implicit def longToLongWritable(l: Long) = new LongWritable(l)
+  implicit def longToLongWritable(l: Long): LongWritable = new LongWritable(l)
-  implicit def floatToFloatWritable(f: Float) = new FloatWritable(f)
+  implicit def floatToFloatWritable(f: Float): FloatWritable = new FloatWritable(f)
-  implicit def doubleToDoubleWritable(d: Double) = new DoubleWritable(d)
+  implicit def doubleToDoubleWritable(d: Double): DoubleWritable = new DoubleWritable(d)
-  implicit def boolToBoolWritable (b: Boolean) = new BooleanWritable(b)
+  implicit def boolToBoolWritable (b: Boolean): BooleanWritable = new BooleanWritable(b)
-  implicit def bytesToBytesWritable (aob: Array[Byte]) = new BytesWritable(aob)
+  implicit def bytesToBytesWritable (aob: Array[Byte]): BytesWritable = new BytesWritable(aob)
-  implicit def stringToText(s: String) = new Text(s)
+  implicit def stringToText(s: String): Text = new Text(s)
   private implicit def arrayToArrayWritable[T <% Writable: ClassTag](arr: Traversable[T])
     : ArrayWritable = {
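
For orientation, a minimal sketch of where these conversions matter (Spark 1.x API; the app name and output path are made up for the example): saveAsSequenceFile needs its keys and values to be viewable as Writable, and importing SparkContext._ brings the conversions above into scope.

    import org.apache.spark.{SparkConf, SparkContext}
    import org.apache.spark.SparkContext._   // Writable conversions, Spark 1.x style

    object SequenceFileDemo {
      def main(args: Array[String]): Unit = {
        val sc = new SparkContext(new SparkConf().setAppName("writable-demo").setMaster("local[*]"))
        // Int keys and String values satisfy the `<% Writable` view bounds through
        // intToIntWritable and stringToText above.
        sc.parallelize(Seq(1 -> "a", 2 -> "b")).saveAsSequenceFile("/tmp/seqfile-demo")
        sc.stop()
      }
    }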

@@ -24,9 +24,9 @@ import org.apache.spark.util.random.XORShiftRandom
 @deprecated("Use Vectors.dense from Spark's mllib.linalg package instead.", "1.0.0")
 class Vector(val elements: Array[Double]) extends Serializable {
-  def length = elements.length
+  def length: Int = elements.length
-  def apply(index: Int) = elements(index)
+  def apply(index: Int): Double = elements(index)
   def + (other: Vector): Vector = {
     if (length != other.length) {
@@ -35,7 +35,7 @@ class Vector(val elements: Array[Double]) extends Serializable {
     Vector(length, i => this(i) + other(i))
   }
-  def add(other: Vector) = this + other
+  def add(other: Vector): Vector = this + other
   def - (other: Vector): Vector = {
     if (length != other.length) {
@@ -44,7 +44,7 @@ class Vector(val elements: Array[Double]) extends Serializable {
     Vector(length, i => this(i) - other(i))
   }
-  def subtract(other: Vector) = this - other
+  def subtract(other: Vector): Vector = this - other
   def dot(other: Vector): Double = {
     if (length != other.length) {
@@ -93,19 +93,19 @@ class Vector(val elements: Array[Double]) extends Serializable {
     this
   }
-  def addInPlace(other: Vector) = this +=other
+  def addInPlace(other: Vector): Vector = this +=other
   def * (scale: Double): Vector = Vector(length, i => this(i) * scale)
-  def multiply (d: Double) = this * d
+  def multiply (d: Double): Vector = this * d
   def / (d: Double): Vector = this * (1 / d)
-  def divide (d: Double) = this / d
+  def divide (d: Double): Vector = this / d
-  def unary_- = this * -1
+  def unary_- : Vector = this * -1
-  def sum = elements.reduceLeft(_ + _)
+  def sum: Double = elements.reduceLeft(_ + _)
   def squaredDist(other: Vector): Double = {
     var ans = 0.0
@@ -119,40 +119,40 @@ class Vector(val elements: Array[Double]) extends Serializable {
   def dist(other: Vector): Double = math.sqrt(squaredDist(other))
-  override def toString = elements.mkString("(", ", ", ")")
+  override def toString: String = elements.mkString("(", ", ", ")")
 }
 object Vector {
-  def apply(elements: Array[Double]) = new Vector(elements)
+  def apply(elements: Array[Double]): Vector = new Vector(elements)
-  def apply(elements: Double*) = new Vector(elements.toArray)
+  def apply(elements: Double*): Vector = new Vector(elements.toArray)
   def apply(length: Int, initializer: Int => Double): Vector = {
     val elements: Array[Double] = Array.tabulate(length)(initializer)
     new Vector(elements)
   }
-  def zeros(length: Int) = new Vector(new Array[Double](length))
+  def zeros(length: Int): Vector = new Vector(new Array[Double](length))
-  def ones(length: Int) = Vector(length, _ => 1)
+  def ones(length: Int): Vector = Vector(length, _ => 1)
   /**
    * Creates this [[org.apache.spark.util.Vector]] of given length containing random numbers
    * between 0.0 and 1.0. Optional scala.util.Random number generator can be provided.
    */
-  def random(length: Int, random: Random = new XORShiftRandom()) =
-    Vector(length, _ => random.nextDouble())
+  def random(length: Int, random: Random = new XORShiftRandom()): Vector =
+    Vector(length, _ => random.nextDouble())
   class Multiplier(num: Double) {
-    def * (vec: Vector) = vec * num
+    def * (vec: Vector): Vector = vec * num
   }
-  implicit def doubleToMultiplier(num: Double) = new Multiplier(num)
+  implicit def doubleToMultiplier(num: Double): Multiplier = new Multiplier(num)
   implicit object VectorAccumParam extends org.apache.spark.AccumulatorParam[Vector] {
-    def addInPlace(t1: Vector, t2: Vector) = t1 + t2
+    def addInPlace(t1: Vector, t2: Vector): Vector = t1 + t2
-    def zero(initialValue: Vector) = Vector.zeros(initialValue.length)
+    def zero(initialValue: Vector): Vector = Vector.zeros(initialValue.length)
   }
 }
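
As a usage note, a small sketch of what the implicits above enable (the class is deprecated per its annotation, so this is illustration only):

    import org.apache.spark.util.Vector
    import org.apache.spark.util.Vector._   // brings doubleToMultiplier into scope

    object VectorDemo extends App {
      val v = Vector.ones(3)                       // (1.0, 1.0, 1.0)
      val scaled: Vector = 2.0 * v                 // Double * Vector via the implicit Multiplier
      val shifted: Vector = v + Vector(0.5, 0.5, 0.5)
      println(scaled)                              // (2.0, 2.0, 2.0)
      println(shifted)                             // (1.5, 1.5, 1.5)
    }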

@@ -129,44 +129,45 @@ private[impl] case class EdgeWithLocalIds[@specialized ED](
     srcId: VertexId, dstId: VertexId, localSrcId: Int, localDstId: Int, attr: ED)

 private[impl] object EdgeWithLocalIds {
-  implicit def lexicographicOrdering[ED] = new Ordering[EdgeWithLocalIds[ED]] {
-    override def compare(a: EdgeWithLocalIds[ED], b: EdgeWithLocalIds[ED]): Int = {
-      if (a.srcId == b.srcId) {
-        if (a.dstId == b.dstId) 0
-        else if (a.dstId < b.dstId) -1
-        else 1
-      } else if (a.srcId < b.srcId) -1
-      else 1
-    }
-  }
+  implicit def lexicographicOrdering[ED]: Ordering[EdgeWithLocalIds[ED]] =
+    new Ordering[EdgeWithLocalIds[ED]] {
+      override def compare(a: EdgeWithLocalIds[ED], b: EdgeWithLocalIds[ED]): Int = {
+        if (a.srcId == b.srcId) {
+          if (a.dstId == b.dstId) 0
+          else if (a.dstId < b.dstId) -1
+          else 1
+        } else if (a.srcId < b.srcId) -1
+        else 1
+      }
+    }

-  private[graphx] def edgeArraySortDataFormat[ED]
-    = new SortDataFormat[EdgeWithLocalIds[ED], Array[EdgeWithLocalIds[ED]]] {
-    override def getKey(
-        data: Array[EdgeWithLocalIds[ED]], pos: Int): EdgeWithLocalIds[ED] = {
-      data(pos)
-    }
-    override def swap(data: Array[EdgeWithLocalIds[ED]], pos0: Int, pos1: Int): Unit = {
-      val tmp = data(pos0)
-      data(pos0) = data(pos1)
-      data(pos1) = tmp
-    }
-    override def copyElement(
-        src: Array[EdgeWithLocalIds[ED]], srcPos: Int,
-        dst: Array[EdgeWithLocalIds[ED]], dstPos: Int) {
-      dst(dstPos) = src(srcPos)
-    }
-    override def copyRange(
-        src: Array[EdgeWithLocalIds[ED]], srcPos: Int,
-        dst: Array[EdgeWithLocalIds[ED]], dstPos: Int, length: Int) {
-      System.arraycopy(src, srcPos, dst, dstPos, length)
-    }
-    override def allocate(length: Int): Array[EdgeWithLocalIds[ED]] = {
-      new Array[EdgeWithLocalIds[ED]](length)
-    }
-  }
+  private[graphx] def edgeArraySortDataFormat[ED] = {
+    new SortDataFormat[EdgeWithLocalIds[ED], Array[EdgeWithLocalIds[ED]]] {
+      override def getKey(data: Array[EdgeWithLocalIds[ED]], pos: Int): EdgeWithLocalIds[ED] = {
+        data(pos)
+      }
+      override def swap(data: Array[EdgeWithLocalIds[ED]], pos0: Int, pos1: Int): Unit = {
+        val tmp = data(pos0)
+        data(pos0) = data(pos1)
+        data(pos1) = tmp
+      }
+      override def copyElement(
+          src: Array[EdgeWithLocalIds[ED]], srcPos: Int,
+          dst: Array[EdgeWithLocalIds[ED]], dstPos: Int) {
+        dst(dstPos) = src(srcPos)
+      }
+      override def copyRange(
+          src: Array[EdgeWithLocalIds[ED]], srcPos: Int,
+          dst: Array[EdgeWithLocalIds[ED]], dstPos: Int, length: Int) {
+        System.arraycopy(src, srcPos, dst, dstPos, length)
+      }
+      override def allocate(length: Int): Array[EdgeWithLocalIds[ED]] = {
+        new Array[EdgeWithLocalIds[ED]](length)
+      }
+    }
+  }
 }
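
For context, a compact sketch (toy Edge case class, not the graphx internals) of how an implicit def that manufactures an Ordering, like lexicographicOrdering above, gets consumed: the compiler materializes it wherever an Ordering is required, for example by sorted.

    case class Edge(src: Long, dst: Long)

    object Edge {
      // Explicit return type, matching the convention this patch enforces.
      implicit def lexicographic: Ordering[Edge] = Ordering.by(e => (e.src, e.dst))
    }

    object SortDemo extends App {
      val edges = Seq(Edge(2, 1), Edge(1, 9), Edge(1, 3))
      println(edges.sorted)   // List(Edge(1,3), Edge(1,9), Edge(2,1))
    }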

@@ -74,8 +74,8 @@ object ShippableVertexPartition {
    * Implicit conversion to allow invoking `VertexPartitionBase` operations directly on a
    * `ShippableVertexPartition`.
    */
-  implicit def shippablePartitionToOps[VD: ClassTag](partition: ShippableVertexPartition[VD]) =
-    new ShippableVertexPartitionOps(partition)
+  implicit def shippablePartitionToOps[VD: ClassTag](partition: ShippableVertexPartition[VD])
+    : ShippableVertexPartitionOps[VD] = new ShippableVertexPartitionOps(partition)

   /**
    * Implicit evidence that `ShippableVertexPartition` is a member of the

@@ -38,8 +38,8 @@ private[graphx] object VertexPartition {
    * Implicit conversion to allow invoking `VertexPartitionBase` operations directly on a
    * `VertexPartition`.
    */
-  implicit def partitionToOps[VD: ClassTag](partition: VertexPartition[VD]) =
-    new VertexPartitionOps(partition)
+  implicit def partitionToOps[VD: ClassTag](partition: VertexPartition[VD])
+    : VertexPartitionOps[VD] = new VertexPartitionOps(partition)

   /**
    * Implicit evidence that `VertexPartition` is a member of the `VertexPartitionBaseOpsConstructor`

@@ -238,8 +238,8 @@ private[graphx] abstract class VertexPartitionBaseOps
    * because these methods return a `Self` and this implicit conversion re-wraps that in a
    * `VertexPartitionBaseOps`. This relies on the context bound on `Self`.
    */
-  private implicit def toOps[VD2: ClassTag](
-      partition: Self[VD2]): VertexPartitionBaseOps[VD2, Self] = {
+  private implicit def toOps[VD2: ClassTag](partition: Self[VD2])
+    : VertexPartitionBaseOps[VD2, Self] = {
     implicitly[VertexPartitionBaseOpsConstructor[Self]].toOps(partition)
   }
 }
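
The three graphx changes above all use the same pattern: an implicit conversion into an "Ops" wrapper so that extra operations appear on the wrapped type, now with the wrapper type written out. A self-contained sketch of that pattern with invented Box/BoxOps names:

    class Box[A](val value: A)

    class BoxOps[A](box: Box[A]) {
      def describe: String = s"Box(${box.value})"
    }

    object Box {
      // The explicit return type keeps the conversion's signature stable instead of
      // letting the compiler infer it from the body.
      implicit def boxToOps[A](box: Box[A]): BoxOps[A] = new BoxOps(box)
    }

    object OpsDemo extends App {
      // boxToOps lives in Box's companion object, so it is found without an import.
      println(new Box(42).describe)   // prints Box(42)
    }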