diff --git a/common/network-common/pom.xml b/common/network-common/pom.xml
index 5444ae6d70..bd507c2cb6 100644
--- a/common/network-common/pom.xml
+++ b/common/network-common/pom.xml
@@ -66,7 +66,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
     </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
diff --git a/common/network-shuffle/pom.xml b/common/network-shuffle/pom.xml
index e736436aec..810ec10ca0 100644
--- a/common/network-shuffle/pom.xml
+++ b/common/network-shuffle/pom.xml
@@ -80,7 +80,7 @@
     </dependency>
     <dependency>
      <groupId>org.apache.spark</groupId>
-      <artifactId>spark-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
     </dependency>
     <dependency>
       <groupId>log4j</groupId>
diff --git a/common/network-yarn/pom.xml b/common/network-yarn/pom.xml
index 1fd3af2e6e..bc83ef24c3 100644
--- a/common/network-yarn/pom.xml
+++ b/common/network-yarn/pom.xml
@@ -48,7 +48,7 @@
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
     </dependency>
diff --git a/common/sketch/pom.xml b/common/sketch/pom.xml
index bbbb0bd5aa..8bc1f52798 100644
--- a/common/sketch/pom.xml
+++ b/common/sketch/pom.xml
@@ -38,7 +38,7 @@
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
     </dependency>
diff --git a/common/tags/pom.xml b/common/tags/pom.xml
index 14e94eca93..8e702b4fef 100644
--- a/common/tags/pom.xml
+++ b/common/tags/pom.xml
@@ -27,12 +27,12 @@
   <groupId>org.apache.spark</groupId>
-  <artifactId>spark-tags_2.11</artifactId>
+  <artifactId>spark-test-tags_2.11</artifactId>
   <packaging>jar</packaging>
-  <name>Spark Project Tags</name>
+  <name>Spark Project Test Tags</name>
   <url>http://spark.apache.org/</url>
   <properties>
-    <sbt.project.name>tags</sbt.project.name>
+    <sbt.project.name>test-tags</sbt.project.name>
   </properties>
diff --git a/common/tags/src/test/java/org/apache/spark/tags/DockerTest.java b/common/tags/src/main/java/org/apache/spark/tags/DockerTest.java
similarity index 100%
rename from common/tags/src/test/java/org/apache/spark/tags/DockerTest.java
rename to common/tags/src/main/java/org/apache/spark/tags/DockerTest.java
diff --git a/common/tags/src/test/java/org/apache/spark/tags/ExtendedHiveTest.java b/common/tags/src/main/java/org/apache/spark/tags/ExtendedHiveTest.java
similarity index 100%
rename from common/tags/src/test/java/org/apache/spark/tags/ExtendedHiveTest.java
rename to common/tags/src/main/java/org/apache/spark/tags/ExtendedHiveTest.java
diff --git a/common/tags/src/test/java/org/apache/spark/tags/ExtendedYarnTest.java b/common/tags/src/main/java/org/apache/spark/tags/ExtendedYarnTest.java
similarity index 100%
rename from common/tags/src/test/java/org/apache/spark/tags/ExtendedYarnTest.java
rename to common/tags/src/main/java/org/apache/spark/tags/ExtendedYarnTest.java
diff --git a/common/unsafe/pom.xml b/common/unsafe/pom.xml
index 6f3bd39491..93b9580f26 100644
--- a/common/unsafe/pom.xml
+++ b/common/unsafe/pom.xml
@@ -61,7 +61,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
     </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
diff --git a/core/pom.xml b/core/pom.xml
index 65a8091c69..7349ad35b9 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -317,7 +317,7 @@
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
     </dependency>
diff --git a/common/tags/src/main/scala/org/apache/spark/annotation/Since.scala b/core/src/main/scala/org/apache/spark/annotation/Since.scala
similarity index 100%
rename from common/tags/src/main/scala/org/apache/spark/annotation/Since.scala
rename to core/src/main/scala/org/apache/spark/annotation/Since.scala
diff --git a/common/tags/src/main/scala/org/apache/spark/annotation/package-info.java b/core/src/main/scala/org/apache/spark/annotation/package-info.java
similarity index 100%
rename from common/tags/src/main/scala/org/apache/spark/annotation/package-info.java
rename to core/src/main/scala/org/apache/spark/annotation/package-info.java
diff --git a/common/tags/src/main/scala/org/apache/spark/annotation/package.scala b/core/src/main/scala/org/apache/spark/annotation/package.scala
similarity index 100%
rename from common/tags/src/main/scala/org/apache/spark/annotation/package.scala
rename to core/src/main/scala/org/apache/spark/annotation/package.scala
diff --git a/dev/sparktestsupport/modules.py b/dev/sparktestsupport/modules.py
index 8b6ce1436d..6d47733ec1 100644
--- a/dev/sparktestsupport/modules.py
+++ b/dev/sparktestsupport/modules.py
@@ -93,18 +93,9 @@ class Module(object):
         return hash(self.name)
 
 
-tags = Module(
-    name="tags",
-    dependencies=[],
-    source_file_regexes=[
-        "common/tags/",
-    ]
-)
-
-
 catalyst = Module(
     name="catalyst",
-    dependencies=[tags],
+    dependencies=[],
     source_file_regexes=[
         "sql/catalyst/",
     ],
@@ -174,7 +165,7 @@ hivecontext_compatibility = Module(
 
 sketch = Module(
     name="sketch",
-    dependencies=[tags],
+    dependencies=[],
     source_file_regexes=[
         "common/sketch/",
     ],
@@ -186,7 +177,7 @@ sketch = Module(
 
 graphx = Module(
     name="graphx",
-    dependencies=[tags],
+    dependencies=[],
     source_file_regexes=[
         "graphx/",
     ],
@@ -198,7 +189,7 @@ graphx = Module(
 
 streaming = Module(
     name="streaming",
-    dependencies=[tags],
+    dependencies=[],
     source_file_regexes=[
         "streaming",
     ],
@@ -214,7 +205,7 @@ streaming = Module(
 # fail other PRs.
 streaming_kinesis_asl = Module(
     name="streaming-kinesis-asl",
-    dependencies=[tags],
+    dependencies=[],
     source_file_regexes=[
         "external/kinesis-asl/",
         "external/kinesis-asl-assembly/",
@@ -279,7 +270,7 @@ streaming_flume_assembly = Module(
 
 mllib_local = Module(
     name="mllib-local",
-    dependencies=[tags],
+    dependencies=[],
     source_file_regexes=[
         "mllib-local",
     ],
diff --git a/external/docker-integration-tests/pom.xml b/external/docker-integration-tests/pom.xml
index f67e2a993f..53a24f3e06 100644
--- a/external/docker-integration-tests/pom.xml
+++ b/external/docker-integration-tests/pom.xml
@@ -128,10 +128,9 @@
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
       <version>${project.version}</version>
       <scope>test</scope>
-      <classifier>tests</classifier>
     </dependency>
     <dependency>
       <groupId>mysql</groupId>
diff --git a/external/flume-sink/pom.xml b/external/flume-sink/pom.xml
index 016af934bb..e4effe158c 100644
--- a/external/flume-sink/pom.xml
+++ b/external/flume-sink/pom.xml
@@ -92,7 +92,7 @@
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
     </dependency>
diff --git a/external/flume/pom.xml b/external/flume/pom.xml
index f51d334de0..d650dd034d 100644
--- a/external/flume/pom.xml
+++ b/external/flume/pom.xml
@@ -68,7 +68,7 @@
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
     </dependency>
diff --git a/external/java8-tests/pom.xml b/external/java8-tests/pom.xml
index 60e3ff60df..1ea9196e9d 100644
--- a/external/java8-tests/pom.xml
+++ b/external/java8-tests/pom.xml
@@ -72,7 +72,7 @@
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
     </dependency>
diff --git a/external/kafka/pom.xml b/external/kafka/pom.xml
index 7f1cfa4e7d..68d52e9339 100644
--- a/external/kafka/pom.xml
+++ b/external/kafka/pom.xml
@@ -88,7 +88,7 @@
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
     </dependency>
diff --git a/external/kinesis-asl/pom.xml b/external/kinesis-asl/pom.xml
index b5f5ff2854..935155eb5d 100644
--- a/external/kinesis-asl/pom.xml
+++ b/external/kinesis-asl/pom.xml
@@ -77,7 +77,7 @@
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
     </dependency>
diff --git a/graphx/pom.xml b/graphx/pom.xml
index fc6c700dd1..1813f383cd 100644
--- a/graphx/pom.xml
+++ b/graphx/pom.xml
@@ -72,7 +72,7 @@
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
     </dependency>
diff --git a/launcher/pom.xml b/launcher/pom.xml
index e7303853e6..ef73194882 100644
--- a/launcher/pom.xml
+++ b/launcher/pom.xml
@@ -65,7 +65,7 @@
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
     </dependency>
diff --git a/mllib-local/pom.xml b/mllib-local/pom.xml
index 078ff3033a..60b615a07f 100644
--- a/mllib-local/pom.xml
+++ b/mllib-local/pom.xml
@@ -57,10 +57,6 @@
       <artifactId>mockito-core</artifactId>
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>org.apache.spark</groupId>
-      <artifactId>spark-tags_${scala.binary.version}</artifactId>
-    </dependency>
   </dependencies>
diff --git a/mllib-local/src/main/scala/org/apache/spark/ml/linalg/Matrices.scala b/mllib-local/src/main/scala/org/apache/spark/ml/linalg/Matrices.scala
index a47526d36f..8204b5af02 100644
--- a/mllib-local/src/main/scala/org/apache/spark/ml/linalg/Matrices.scala
+++ b/mllib-local/src/main/scala/org/apache/spark/ml/linalg/Matrices.scala
@@ -24,28 +24,21 @@ import scala.collection.mutable.{ArrayBuffer, ArrayBuilder => MArrayBuilder, Has
 import breeze.linalg.{CSCMatrix => BSM, DenseMatrix => BDM, Matrix => BM}
 import com.github.fommil.netlib.BLAS.{getInstance => blas}
 
-import org.apache.spark.annotation.Since
-
 /**
  * Trait for a local matrix.
  */
-@Since("2.0.0")
 sealed trait Matrix extends Serializable {
 
   /** Number of rows. */
-  @Since("2.0.0")
   def numRows: Int
 
   /** Number of columns. */
-  @Since("2.0.0")
   def numCols: Int
 
   /** Flag that keeps track whether the matrix is transposed or not. False by default. */
-  @Since("2.0.0")
   val isTransposed: Boolean = false
 
   /** Converts to a dense array in column major. */
-  @Since("2.0.0")
   def toArray: Array[Double] = {
     val newArray = new Array[Double](numRows * numCols)
     foreachActive { (i, j, v) =>
@@ -58,21 +51,18 @@ sealed trait Matrix extends Serializable {
    * Returns an iterator of column vectors.
    * This operation could be expensive, depending on the underlying storage.
    */
-  @Since("2.0.0")
   def colIter: Iterator[Vector]
 
   /**
    * Returns an iterator of row vectors.
    * This operation could be expensive, depending on the underlying storage.
    */
-  @Since("2.0.0")
   def rowIter: Iterator[Vector] = this.transpose.colIter
 
   /** Converts to a breeze matrix. */
   private[ml] def toBreeze: BM[Double]
 
   /** Gets the (i, j)-th element. */
-  @Since("2.0.0")
   def apply(i: Int, j: Int): Double
 
   /** Return the index for the (i, j)-th element in the backing array. */
@@ -82,15 +72,12 @@ sealed trait Matrix extends Serializable {
   private[ml] def update(i: Int, j: Int, v: Double): Unit
 
   /** Get a deep copy of the matrix. */
-  @Since("2.0.0")
   def copy: Matrix
 
   /** Transpose the Matrix. Returns a new `Matrix` instance sharing the same underlying data. */
-  @Since("2.0.0")
   def transpose: Matrix
 
   /** Convenience method for `Matrix`-`DenseMatrix` multiplication. */
-  @Since("2.0.0")
   def multiply(y: DenseMatrix): DenseMatrix = {
     val C: DenseMatrix = DenseMatrix.zeros(numRows, y.numCols)
     BLAS.gemm(1.0, this, y, 0.0, C)
@@ -98,13 +85,11 @@
   }
 
   /** Convenience method for `Matrix`-`DenseVector` multiplication. For binary compatibility. */
-  @Since("2.0.0")
   def multiply(y: DenseVector): DenseVector = {
     multiply(y.asInstanceOf[Vector])
   }
 
   /** Convenience method for `Matrix`-`Vector` multiplication. */
-  @Since("2.0.0")
   def multiply(y: Vector): DenseVector = {
     val output = new DenseVector(new Array[Double](numRows))
     BLAS.gemv(1.0, this, y, 0.0, output)
@@ -115,7 +100,6 @@ sealed trait Matrix extends Serializable {
   override def toString: String = toBreeze.toString()
 
   /** A human readable representation of the matrix with maximum lines and width */
-  @Since("2.0.0")
   def toString(maxLines: Int, maxLineWidth: Int): String = toBreeze.toString(maxLines, maxLineWidth)
 
   /**
@@ -145,13 +129,11 @@ sealed trait Matrix extends Serializable {
   /**
    * Find the number of non-zero active values.
    */
-  @Since("2.0.0")
   def numNonzeros: Int
 
   /**
    * Find the number of values stored explicitly. These values can be zero as well.
    */
-  @Since("2.0.0")
   def numActives: Int
 }
 
@@ -172,11 +154,10 @@
  * @param isTransposed whether the matrix is transposed. If true, `values` stores the matrix in
  *                     row major.
  */
-@Since("2.0.0")
-class DenseMatrix @Since("2.0.0") (
-    @Since("2.0.0") val numRows: Int,
-    @Since("2.0.0") val numCols: Int,
-    @Since("2.0.0") val values: Array[Double],
+class DenseMatrix (
+    val numRows: Int,
+    val numCols: Int,
+    val values: Array[Double],
     override val isTransposed: Boolean) extends Matrix {
 
   require(values.length == numRows * numCols, "The number of values supplied doesn't match the " +
@@ -197,7 +178,6 @@ class DenseMatrix @Since("2.0.0") (
    * @param numCols number of columns
    * @param values matrix entries in column major
    */
-  @Since("2.0.0")
   def this(numRows: Int, numCols: Int, values: Array[Double]) =
     this(numRows, numCols, values, false)
 
@@ -286,7 +266,6 @@ class DenseMatrix @Since("2.0.0") (
    * Generate a `SparseMatrix` from the given `DenseMatrix`. The new matrix will have isTransposed
    * set to false.
    */
-  @Since("2.0.0")
   def toSparse: SparseMatrix = {
     val spVals: MArrayBuilder[Double] = new MArrayBuilder.ofDouble
     val colPtrs: Array[Int] = new Array[Int](numCols + 1)
@@ -328,7 +307,6 @@ class DenseMatrix @Since("2.0.0") (
 
 /**
  * Factory methods for [[org.apache.spark.ml.linalg.DenseMatrix]].
  */
-@Since("2.0.0")
 object DenseMatrix {
 
   /**
@@ -337,7 +315,6 @@
    * @param numCols number of columns of the matrix
    * @return `DenseMatrix` with size `numRows` x `numCols` and values of zeros
    */
-  @Since("2.0.0")
   def zeros(numRows: Int, numCols: Int): DenseMatrix = {
     require(numRows.toLong * numCols <= Int.MaxValue,
       s"$numRows x $numCols dense matrix is too large to allocate")
@@ -350,7 +327,6 @@
    * @param numCols number of columns of the matrix
    * @return `DenseMatrix` with size `numRows` x `numCols` and values of ones
    */
-  @Since("2.0.0")
   def ones(numRows: Int, numCols: Int): DenseMatrix = {
     require(numRows.toLong * numCols <= Int.MaxValue,
       s"$numRows x $numCols dense matrix is too large to allocate")
@@ -362,7 +338,6 @@
    * @param n number of rows and columns of the matrix
    * @return `DenseMatrix` with size `n` x `n` and values of ones on the diagonal
    */
-  @Since("2.0.0")
   def eye(n: Int): DenseMatrix = {
     val identity = DenseMatrix.zeros(n, n)
     var i = 0
@@ -380,7 +355,6 @@
    * @param rng a random number generator
    * @return `DenseMatrix` with size `numRows` x `numCols` and values in U(0, 1)
    */
-  @Since("2.0.0")
   def rand(numRows: Int, numCols: Int, rng: Random): DenseMatrix = {
     require(numRows.toLong * numCols <= Int.MaxValue,
       s"$numRows x $numCols dense matrix is too large to allocate")
@@ -394,7 +368,6 @@
    * @param rng a random number generator
    * @return `DenseMatrix` with size `numRows` x `numCols` and values in N(0, 1)
    */
-  @Since("2.0.0")
   def randn(numRows: Int, numCols: Int, rng: Random): DenseMatrix = {
     require(numRows.toLong * numCols <= Int.MaxValue,
       s"$numRows x $numCols dense matrix is too large to allocate")
@@ -407,7 +380,6 @@
    * @return Square `DenseMatrix` with size `values.length` x `values.length` and `values`
    *         on the diagonal
    */
-  @Since("2.0.0")
   def diag(vector: Vector): DenseMatrix = {
     val n = vector.size
     val matrix = DenseMatrix.zeros(n, n)
@@ -443,13 +415,12 @@
  *                     Compressed Sparse Row (CSR) format, where `colPtrs` behaves as rowPtrs,
  *                     and `rowIndices` behave as colIndices, and `values` are stored in row major.
  */
-@Since("2.0.0")
-class SparseMatrix @Since("2.0.0") (
-    @Since("2.0.0") val numRows: Int,
-    @Since("2.0.0") val numCols: Int,
-    @Since("2.0.0") val colPtrs: Array[Int],
-    @Since("2.0.0") val rowIndices: Array[Int],
-    @Since("2.0.0") val values: Array[Double],
+class SparseMatrix (
+    val numRows: Int,
+    val numCols: Int,
+    val colPtrs: Array[Int],
+    val rowIndices: Array[Int],
+    val values: Array[Double],
     override val isTransposed: Boolean) extends Matrix {
 
   require(values.length == rowIndices.length, "The number of row indices and values don't match! " +
@@ -480,7 +451,6 @@ class SparseMatrix @Since("2.0.0") (
    *                   order for each column
    * @param values non-zero matrix entries in column major
    */
-  @Since("2.0.0")
   def this(
       numRows: Int,
       numCols: Int,
@@ -580,7 +550,6 @@ class SparseMatrix @Since("2.0.0") (
    * Generate a `DenseMatrix` from the given `SparseMatrix`. The new matrix will have isTransposed
    * set to false.
    */
-  @Since("2.0.0")
   def toDense: DenseMatrix = {
     new DenseMatrix(numRows, numCols, toArray)
   }
@@ -625,7 +594,6 @@ class SparseMatrix @Since("2.0.0") (
 
 /**
  * Factory methods for [[org.apache.spark.ml.linalg.SparseMatrix]].
  */
-@Since("2.0.0")
 object SparseMatrix {
 
   /**
@@ -637,7 +605,6 @@
    * @param entries Array of (i, j, value) tuples
    * @return The corresponding `SparseMatrix`
    */
-  @Since("2.0.0")
   def fromCOO(numRows: Int, numCols: Int, entries: Iterable[(Int, Int, Double)]): SparseMatrix = {
     val sortedEntries = entries.toSeq.sortBy(v => (v._2, v._1))
     val numEntries = sortedEntries.size
@@ -686,7 +653,6 @@
    * @param n number of rows and columns of the matrix
    * @return `SparseMatrix` with size `n` x `n` and values of ones on the diagonal
    */
-  @Since("2.0.0")
   def speye(n: Int): SparseMatrix = {
     new SparseMatrix(n, n, (0 to n).toArray, (0 until n).toArray, Array.fill(n)(1.0))
   }
@@ -756,7 +722,6 @@
    * @param rng a random number generator
    * @return `SparseMatrix` with size `numRows` x `numCols` and values in U(0, 1)
    */
-  @Since("2.0.0")
   def sprand(numRows: Int, numCols: Int, density: Double, rng: Random): SparseMatrix = {
     val mat = genRandMatrix(numRows, numCols, density, rng)
     mat.update(i => rng.nextDouble())
@@ -770,7 +735,6 @@
    * @param rng a random number generator
    * @return `SparseMatrix` with size `numRows` x `numCols` and values in N(0, 1)
    */
-  @Since("2.0.0")
   def sprandn(numRows: Int, numCols: Int, density: Double, rng: Random): SparseMatrix = {
     val mat = genRandMatrix(numRows, numCols, density, rng)
     mat.update(i => rng.nextGaussian())
@@ -782,7 +746,6 @@
    * @return Square `SparseMatrix` with size `values.length` x `values.length` and non-zero
    *         `values` on the diagonal
    */
-  @Since("2.0.0")
   def spdiag(vector: Vector): SparseMatrix = {
     val n = vector.size
     vector match {
@@ -799,7 +762,6 @@
 
 /**
  * Factory methods for [[org.apache.spark.ml.linalg.Matrix]].
  */
-@Since("2.0.0")
 object Matrices {
 
   /**
@@ -809,7 +771,6 @@
    * @param numCols number of columns
    * @param values matrix entries in column major
    */
-  @Since("2.0.0")
   def dense(numRows: Int, numCols: Int, values: Array[Double]): Matrix = {
     new DenseMatrix(numRows, numCols, values)
   }
@@ -823,7 +784,6 @@
    * @param rowIndices the row index of the entry
    * @param values non-zero matrix entries in column major
    */
-  @Since("2.0.0")
   def sparse(
       numRows: Int,
       numCols: Int,
@@ -865,7 +825,6 @@
    * @param numCols number of columns of the matrix
    * @return `Matrix` with size `numRows` x `numCols` and values of zeros
    */
-  @Since("2.0.0")
   def zeros(numRows: Int, numCols: Int): Matrix = DenseMatrix.zeros(numRows, numCols)
 
   /**
@@ -874,7 +833,6 @@
    * @param numCols number of columns of the matrix
    * @return `Matrix` with size `numRows` x `numCols` and values of ones
    */
-  @Since("2.0.0")
   def ones(numRows: Int, numCols: Int): Matrix = DenseMatrix.ones(numRows, numCols)
 
   /**
@@ -882,7 +840,6 @@
    * @param n number of rows and columns of the matrix
    * @return `Matrix` with size `n` x `n` and values of ones on the diagonal
    */
-  @Since("2.0.0")
   def eye(n: Int): Matrix = DenseMatrix.eye(n)
 
   /**
@@ -890,7 +847,6 @@
    * @param n number of rows and columns of the matrix
    * @return `Matrix` with size `n` x `n` and values of ones on the diagonal
    */
-  @Since("2.0.0")
   def speye(n: Int): Matrix = SparseMatrix.speye(n)
 
   /**
@@ -900,7 +856,6 @@
    * @param rng a random number generator
    * @return `Matrix` with size `numRows` x `numCols` and values in U(0, 1)
    */
-  @Since("2.0.0")
   def rand(numRows: Int, numCols: Int, rng: Random): Matrix =
     DenseMatrix.rand(numRows, numCols, rng)
 
@@ -912,7 +867,6 @@
    * @param rng a random number generator
    * @return `Matrix` with size `numRows` x `numCols` and values in U(0, 1)
    */
-  @Since("2.0.0")
   def sprand(numRows: Int, numCols: Int, density: Double, rng: Random): Matrix =
     SparseMatrix.sprand(numRows, numCols, density, rng)
 
@@ -923,7 +877,6 @@
    * @param rng a random number generator
    * @return `Matrix` with size `numRows` x `numCols` and values in N(0, 1)
    */
-  @Since("2.0.0")
   def randn(numRows: Int, numCols: Int, rng: Random): Matrix =
     DenseMatrix.randn(numRows, numCols, rng)
 
@@ -935,7 +888,6 @@
    * @param rng a random number generator
    * @return `Matrix` with size `numRows` x `numCols` and values in N(0, 1)
    */
-  @Since("2.0.0")
   def sprandn(numRows: Int, numCols: Int, density: Double, rng: Random): Matrix =
     SparseMatrix.sprandn(numRows, numCols, density, rng)
 
@@ -945,7 +897,6 @@
    * @return Square `Matrix` with size `values.length` x `values.length` and `values`
    *         on the diagonal
    */
-  @Since("2.0.0")
   def diag(vector: Vector): Matrix = DenseMatrix.diag(vector)
 
   /**
@@ -955,7 +906,6 @@
    * @param matrices array of matrices
    * @return a single `Matrix` composed of the matrices that were horizontally concatenated
    */
-  @Since("2.0.0")
   def horzcat(matrices: Array[Matrix]): Matrix = {
     if (matrices.isEmpty) {
       return new DenseMatrix(0, 0, Array[Double]())
@@ -1014,7 +964,6 @@
    * @param matrices array of matrices
    * @return a single `Matrix` composed of the matrices that were vertically concatenated
    */
-  @Since("2.0.0")
   def vertcat(matrices: Array[Matrix]): Matrix = {
     if (matrices.isEmpty) {
       return new DenseMatrix(0, 0, Array[Double]())
diff --git a/mllib-local/src/main/scala/org/apache/spark/ml/linalg/Vectors.scala b/mllib-local/src/main/scala/org/apache/spark/ml/linalg/Vectors.scala
index 5b3b3b6887..4275a22ae0 100644
--- a/mllib-local/src/main/scala/org/apache/spark/ml/linalg/Vectors.scala
+++ b/mllib-local/src/main/scala/org/apache/spark/ml/linalg/Vectors.scala
@@ -28,26 +28,21 @@ import org.json4s.DefaultFormats
 import org.json4s.JsonDSL._
 import org.json4s.jackson.JsonMethods.{compact, parse => parseJson, render}
 
-import org.apache.spark.annotation.Since
-
 /**
  * Represents a numeric vector, whose index type is Int and value type is Double.
  *
  * Note: Users should not implement this interface.
  */
-@Since("2.0.0")
 sealed trait Vector extends Serializable {
 
   /**
    * Size of the vector.
    */
-  @Since("2.0.0")
   def size: Int
 
   /**
    * Converts the instance to a double array.
    */
-  @Since("2.0.0")
   def toArray: Array[Double]
 
   override def equals(other: Any): Boolean = {
@@ -101,13 +96,11 @@ sealed trait Vector extends Serializable {
    * Gets the value of the ith element.
    * @param i index
    */
-  @Since("2.0.0")
   def apply(i: Int): Double = toBreeze(i)
 
   /**
    * Makes a deep copy of this vector.
    */
-  @Since("2.0.0")
   def copy: Vector = {
     throw new NotImplementedError(s"copy is not implemented for ${this.getClass}.")
   }
@@ -119,38 +112,32 @@ sealed trait Vector extends Serializable {
    *          the vector with type `Int`, and the second parameter is the corresponding value
    *          with type `Double`.
    */
-  @Since("2.0.0")
   def foreachActive(f: (Int, Double) => Unit): Unit
 
   /**
    * Number of active entries. An "active entry" is an element which is explicitly stored,
    * regardless of its value. Note that inactive entries have value 0.
    */
-  @Since("2.0.0")
   def numActives: Int
 
   /**
    * Number of nonzero elements. This scans all active values and count nonzeros.
    */
-  @Since("2.0.0")
   def numNonzeros: Int
 
   /**
    * Converts this vector to a sparse vector with all explicit zeros removed.
    */
-  @Since("2.0.0")
   def toSparse: SparseVector
 
   /**
    * Converts this vector to a dense vector.
    */
-  @Since("2.0.0")
   def toDense: DenseVector = new DenseVector(this.toArray)
 
   /**
    * Returns a vector in either dense or sparse format, whichever uses less storage.
    */
-  @Since("2.0.0")
   def compressed: Vector = {
     val nnz = numNonzeros
     // A dense vector needs 8 * size + 8 bytes, while a sparse vector needs 12 * nnz + 20 bytes.
@@ -165,13 +152,11 @@ sealed trait Vector extends Serializable {
    * Find the index of a maximal element. Returns the first maximal element in case of a tie.
    * Returns -1 if vector has length 0.
    */
-  @Since("2.0.0")
   def argmax: Int
 
   /**
    * Converts the vector to a JSON string.
    */
-  @Since("2.0.0")
   def toJson: String
 }
 
@@ -180,14 +165,12 @@ sealed trait Vector extends Serializable {
 * We don't use the name `Vector` because Scala imports
 * [[scala.collection.immutable.Vector]] by default.
 */
-@Since("2.0.0")
 object Vectors {
 
   /**
    * Creates a dense vector from its values.
    */
   @varargs
-  @Since("2.0.0")
   def dense(firstValue: Double, otherValues: Double*): Vector =
     new DenseVector((firstValue +: otherValues).toArray)
 
@@ -195,7 +178,6 @@ object Vectors {
   /**
    * Creates a dense vector from a double array.
    */
-  @Since("2.0.0")
   def dense(values: Array[Double]): Vector = new DenseVector(values)
 
   /**
@@ -205,7 +187,6 @@ object Vectors {
    * @param indices index array, must be strictly increasing.
    * @param values value array, must have the same length as indices.
    */
-  @Since("2.0.0")
   def sparse(size: Int, indices: Array[Int], values: Array[Double]): Vector =
     new SparseVector(size, indices, values)
 
@@ -215,7 +196,6 @@
    * @param size vector size.
    * @param elements vector elements in (index, value) pairs.
    */
-  @Since("2.0.0")
   def sparse(size: Int, elements: Seq[(Int, Double)]): Vector = {
     require(size > 0, "The size of the requested sparse vector must be greater than 0.")
 
@@ -237,7 +217,6 @@
    * @param size vector size.
    * @param elements vector elements in (index, value) pairs.
    */
-  @Since("2.0.0")
   def sparse(size: Int, elements: JavaIterable[(JavaInteger, JavaDouble)]): Vector = {
     sparse(size, elements.asScala.map { case (i, x) =>
       (i.intValue(), x.doubleValue())
@@ -250,7 +229,6 @@
    * @param size vector size
    * @return a zero vector
    */
-  @Since("2.0.0")
   def zeros(size: Int): Vector = {
     new DenseVector(new Array[Double](size))
   }
@@ -258,7 +236,6 @@
   /**
    * Parses the JSON representation of a vector into a [[Vector]].
    */
-  @Since("2.0.0")
   def fromJson(json: String): Vector = {
     implicit val formats = DefaultFormats
     val jValue = parseJson(json)
@@ -304,7 +281,6 @@
    * @param p norm.
    * @return norm in L^p^ space.
    */
-  @Since("2.0.0")
   def norm(vector: Vector, p: Double): Double = {
     require(p >= 1.0, "To compute the p-norm of the vector, we require that you specify a p>=1. " +
       s"You specified p=$p.")
@@ -357,7 +333,6 @@
    * @param v2 second Vector.
    * @return squared distance between two Vectors.
    */
-  @Since("2.0.0")
   def sqdist(v1: Vector, v2: Vector): Double = {
     require(v1.size == v2.size, s"Vector dimensions do not match: Dim(v1)=${v1.size} and Dim(v2)" +
       s"=${v2.size}.")
@@ -474,8 +449,7 @@
 /**
  * A dense vector represented by a value array.
  */
-@Since("2.0.0")
-class DenseVector @Since("2.0.0") (@Since("2.0.0") val values: Array[Double]) extends Vector {
+class DenseVector (val values: Array[Double]) extends Vector {
 
   override def size: Int = values.length
 
@@ -574,11 +548,9 @@ class DenseVector @Since("2.0.0") (@Since("2.0.0") val values: Array[Double]) ex
   }
 }
 
-@Since("2.0.0")
 object DenseVector {
 
   /** Extracts the value array from a dense vector. */
-  @Since("2.0.0")
   def unapply(dv: DenseVector): Option[Array[Double]] = Some(dv.values)
 }
 
@@ -589,11 +561,10 @@
  * @param indices index array, assume to be strictly increasing.
 * @param values value array, must have the same length as the index array.
 */
-@Since("2.0.0")
-class SparseVector @Since("2.0.0") (
+class SparseVector (
     override val size: Int,
-    @Since("2.0.0") val indices: Array[Int],
-    @Since("2.0.0") val values: Array[Double]) extends Vector {
+    val indices: Array[Int],
+    val values: Array[Double]) extends Vector {
 
   require(indices.length == values.length, "Sparse vectors require that the dimension of the" +
     s" indices match the dimension of the values. You provided ${indices.length} indices and " +
@@ -763,9 +734,7 @@ class SparseVector @Since("2.0.0") (
   }
 }
 
-@Since("2.0.0")
 object SparseVector {
-  @Since("2.0.0")
   def unapply(sv: SparseVector): Option[(Int, Array[Int], Array[Double])] =
     Some((sv.size, sv.indices, sv.values))
 }
diff --git a/mllib/pom.xml b/mllib/pom.xml
index c324afb2a2..24d8274e22 100644
--- a/mllib/pom.xml
+++ b/mllib/pom.xml
@@ -116,7 +116,7 @@
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
     </dependency>
diff --git a/pom.xml b/pom.xml
index 20615d599a..34c374d669 100644
--- a/pom.xml
+++ b/pom.xml
@@ -284,14 +284,8 @@
       <dependency>
         <groupId>org.apache.spark</groupId>
-        <artifactId>spark-tags_${scala.binary.version}</artifactId>
+        <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
         <version>${project.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.spark</groupId>
-        <artifactId>spark-tags_${scala.binary.version}</artifactId>
-        <version>${project.version}</version>
-        <classifier>tests</classifier>
         <scope>test</scope>
       </dependency>
diff --git a/project/MimaExcludes.scala b/project/MimaExcludes.scala
index 26a3760bc3..6fc49a08fe 100644
--- a/project/MimaExcludes.scala
+++ b/project/MimaExcludes.scala
@@ -700,10 +700,6 @@
         ProblemFilters.exclude[IncompatibleResultTypeProblem]("org.apache.spark.executor.ShuffleReadMetrics.localBlocksFetched"),
         ProblemFilters.exclude[IncompatibleResultTypeProblem]("org.apache.spark.status.api.v1.ShuffleReadMetrics.remoteBlocksFetched"),
         ProblemFilters.exclude[IncompatibleResultTypeProblem]("org.apache.spark.status.api.v1.ShuffleReadMetrics.localBlocksFetched")
-      ) ++ Seq(
-        // [SPARK-14613] Add @Since into the matrix and vector classes in spark-mllib-local
-        ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.annotation.package$"),
-        ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.annotation.package")
       )
     case v if v.startsWith("1.6") =>
       Seq(
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index ecd08defd2..ffbca25e46 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -50,10 +50,10 @@ object BuildCommons {
   ).map(ProjectRef(buildLocation, _))
 
   val allProjects@Seq(
-    core, graphx, mllib, mllibLocal, repl, networkCommon, networkShuffle, launcher, unsafe, tags, sketch, _*
+    core, graphx, mllib, mllibLocal, repl, networkCommon, networkShuffle, launcher, unsafe, testTags, sketch, _*
   ) = Seq(
     "core", "graphx", "mllib", "mllib-local", "repl", "network-common", "network-shuffle", "launcher", "unsafe",
-    "tags", "sketch"
+    "test-tags", "sketch"
   ).map(ProjectRef(buildLocation, _)) ++ sqlProjects ++ streamingProjects
 
   val optionallyEnabledProjects@Seq(yarn, java8Tests, sparkGangliaLgpl,
@@ -336,7 +336,7 @@ object SparkBuild extends PomBuild {
   val mimaProjects = allProjects.filterNot { x =>
     Seq(
       spark, hive, hiveThriftServer, hiveCompatibility, catalyst, repl, networkCommon, networkShuffle, networkYarn,
-      unsafe, tags, sketch, mllibLocal
+      unsafe, testTags, sketch, mllibLocal
     ).contains(x)
   }
 
@@ -680,9 +680,9 @@ object Unidoc {
     publish := {},
 
     unidocProjectFilter in(ScalaUnidoc, unidoc) :=
-      inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, streamingFlumeSink, yarn, tags),
+      inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, streamingFlumeSink, yarn, testTags),
     unidocProjectFilter in(JavaUnidoc, unidoc) :=
-      inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, streamingFlumeSink, yarn, tags),
+      inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, streamingFlumeSink, yarn, testTags),
 
     // Skip actual catalyst, but include the subproject.
     // Catalyst is not public API and contains quasiquotes which break scaladoc.
diff --git a/repl/pom.xml b/repl/pom.xml
index c12d121c61..0f396c9b80 100644
--- a/repl/pom.xml
+++ b/repl/pom.xml
@@ -87,7 +87,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.xbean</groupId>
diff --git a/sql/catalyst/pom.xml b/sql/catalyst/pom.xml
index 1923199f4b..1748fa2778 100644
--- a/sql/catalyst/pom.xml
+++ b/sql/catalyst/pom.xml
@@ -55,7 +55,7 @@
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
diff --git a/sql/core/pom.xml b/sql/core/pom.xml
index d745e0fb43..e1071ebfb5 100644
--- a/sql/core/pom.xml
+++ b/sql/core/pom.xml
@@ -73,7 +73,7 @@
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.parquet</groupId>
diff --git a/sql/hive-thriftserver/pom.xml b/sql/hive-thriftserver/pom.xml
index eb71507855..c8d17bd468 100644
--- a/sql/hive-thriftserver/pom.xml
+++ b/sql/hive-thriftserver/pom.xml
@@ -95,7 +95,7 @@
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
     </dependency>
diff --git a/sql/hive/pom.xml b/sql/hive/pom.xml
index efffb56799..177b6884fa 100644
--- a/sql/hive/pom.xml
+++ b/sql/hive/pom.xml
@@ -60,9 +60,7 @@
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-tags_${scala.binary.version}</artifactId>
-      <classifier>tests</classifier>
-      <scope>test</scope>
+      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
     </dependency>
diff --git a/yarn/pom.xml b/yarn/pom.xml
index d0b6077560..328bb6678d 100644
--- a/yarn/pom.xml
+++ b/yarn/pom.xml
@@ -53,9 +53,7 @@
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-tags_${scala.binary.version}</artifactId>
-      <scope>test</scope>
-      <classifier>tests</classifier>
+      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
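Note on the tag classes moved back to `src/main` above (`DockerTest`, `ExtendedHiveTest`, `ExtendedYarnTest`): they are test-tag annotations, and shipping them in the main sources of `spark-test-tags` is what lets the downstream poms in this patch drop the `tests` classifier and depend on the artifact directly. A minimal sketch of how such a tag is applied — the suite name and test body are hypothetical; only the annotation comes from this patch:

```scala
import org.apache.spark.tags.ExtendedYarnTest
import org.scalatest.FunSuite

// Hypothetical suite: a class-level tag annotation marks every test in the
// suite, so a runner can include or exclude the whole group by tag name
// (for ScalaTest, e.g. via `-l org.apache.spark.tags.ExtendedYarnTest`).
@ExtendedYarnTest
class ExampleYarnSuite extends FunSuite {
  test("placeholder") {
    assert(1 + 1 === 2)
  }
}
```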