[SPARK-26127][ML] Remove deprecated setters from tree regression and classification models

## What changes were proposed in this pull request?

The setter methods on the tree-based regression and classification models have been deprecated since 2.1, with the deprecation notice stating that they would be removed in 3.0. This PR therefore removes the deprecated methods.
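For users migrating, the replacement is to configure these parameters on the estimator before fitting, rather than on the fitted model. A minimal sketch (`training` is an assumed DataFrame with the usual `label`/`features` columns):

```scala
import org.apache.spark.ml.classification.DecisionTreeClassifier

// Set tree parameters on the Estimator; the fitted Model no longer
// exposes these setters.
val dt = new DecisionTreeClassifier()
  .setMaxDepth(5)
  .setMaxBins(32)
  .setImpurity("gini")

val model = dt.fit(training)  // DecisionTreeClassificationModel
// model.setMaxDepth(5)       // would no longer compile after this change
```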

## How was this patch tested?

NA

Closes #23093 from mgaido91/SPARK-26127.

Authored-by: Marco Gaido <marcogaido91@gmail.com>
Signed-off-by: Sean Owen <sean.owen@databricks.com>
Commit 4aa9ccbde7 (parent 81550b38e4), committed by Sean Owen on 2018-11-21 17:03:57 -06:00
8 changed files with 138 additions and 178 deletions

mllib/src/main/scala/org/apache/spark/ml/classification/DecisionTreeClassifier.scala

@@ -55,27 +55,27 @@ class DecisionTreeClassifier @Since("1.4.0") (
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setMaxDepth(value: Int): this.type = set(maxDepth, value)
+  def setMaxDepth(value: Int): this.type = set(maxDepth, value)
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setMaxBins(value: Int): this.type = set(maxBins, value)
+  def setMaxBins(value: Int): this.type = set(maxBins, value)
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setMinInstancesPerNode(value: Int): this.type = set(minInstancesPerNode, value)
+  def setMinInstancesPerNode(value: Int): this.type = set(minInstancesPerNode, value)
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setMinInfoGain(value: Double): this.type = set(minInfoGain, value)
+  def setMinInfoGain(value: Double): this.type = set(minInfoGain, value)
 
   /** @group expertSetParam */
   @Since("1.4.0")
-  override def setMaxMemoryInMB(value: Int): this.type = set(maxMemoryInMB, value)
+  def setMaxMemoryInMB(value: Int): this.type = set(maxMemoryInMB, value)
 
   /** @group expertSetParam */
   @Since("1.4.0")
-  override def setCacheNodeIds(value: Boolean): this.type = set(cacheNodeIds, value)
+  def setCacheNodeIds(value: Boolean): this.type = set(cacheNodeIds, value)
 
   /**
    * Specifies how often to checkpoint the cached node IDs.
@@ -87,15 +87,15 @@ class DecisionTreeClassifier @Since("1.4.0") (
    * @group setParam
    */
   @Since("1.4.0")
-  override def setCheckpointInterval(value: Int): this.type = set(checkpointInterval, value)
+  def setCheckpointInterval(value: Int): this.type = set(checkpointInterval, value)
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setImpurity(value: String): this.type = set(impurity, value)
+  def setImpurity(value: String): this.type = set(impurity, value)
 
   /** @group setParam */
   @Since("1.6.0")
-  override def setSeed(value: Long): this.type = set(seed, value)
+  def setSeed(value: Long): this.type = set(seed, value)
 
   override protected def train(
       dataset: Dataset[_]): DecisionTreeClassificationModel = instrumented { instr =>

mllib/src/main/scala/org/apache/spark/ml/classification/GBTClassifier.scala

@@ -69,27 +69,27 @@ class GBTClassifier @Since("1.4.0") (
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setMaxDepth(value: Int): this.type = set(maxDepth, value)
+  def setMaxDepth(value: Int): this.type = set(maxDepth, value)
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setMaxBins(value: Int): this.type = set(maxBins, value)
+  def setMaxBins(value: Int): this.type = set(maxBins, value)
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setMinInstancesPerNode(value: Int): this.type = set(minInstancesPerNode, value)
+  def setMinInstancesPerNode(value: Int): this.type = set(minInstancesPerNode, value)
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setMinInfoGain(value: Double): this.type = set(minInfoGain, value)
+  def setMinInfoGain(value: Double): this.type = set(minInfoGain, value)
 
   /** @group expertSetParam */
   @Since("1.4.0")
-  override def setMaxMemoryInMB(value: Int): this.type = set(maxMemoryInMB, value)
+  def setMaxMemoryInMB(value: Int): this.type = set(maxMemoryInMB, value)
 
   /** @group expertSetParam */
   @Since("1.4.0")
-  override def setCacheNodeIds(value: Boolean): this.type = set(cacheNodeIds, value)
+  def setCacheNodeIds(value: Boolean): this.type = set(cacheNodeIds, value)
 
   /**
    * Specifies how often to checkpoint the cached node IDs.
@@ -101,7 +101,7 @@ class GBTClassifier @Since("1.4.0") (
    * @group setParam
    */
   @Since("1.4.0")
-  override def setCheckpointInterval(value: Int): this.type = set(checkpointInterval, value)
+  def setCheckpointInterval(value: Int): this.type = set(checkpointInterval, value)
 
   /**
    * The impurity setting is ignored for GBT models.
@@ -110,7 +110,7 @@ class GBTClassifier @Since("1.4.0") (
    * @group setParam
    */
   @Since("1.4.0")
-  override def setImpurity(value: String): this.type = {
+  def setImpurity(value: String): this.type = {
     logWarning("GBTClassifier.setImpurity should NOT be used")
     this
   }
@@ -119,25 +119,25 @@ class GBTClassifier @Since("1.4.0") (
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setSubsamplingRate(value: Double): this.type = set(subsamplingRate, value)
+  def setSubsamplingRate(value: Double): this.type = set(subsamplingRate, value)
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setSeed(value: Long): this.type = set(seed, value)
+  def setSeed(value: Long): this.type = set(seed, value)
 
   // Parameters from GBTParams:
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setMaxIter(value: Int): this.type = set(maxIter, value)
+  def setMaxIter(value: Int): this.type = set(maxIter, value)
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setStepSize(value: Double): this.type = set(stepSize, value)
+  def setStepSize(value: Double): this.type = set(stepSize, value)
 
   /** @group setParam */
   @Since("2.3.0")
-  override def setFeatureSubsetStrategy(value: String): this.type =
+  def setFeatureSubsetStrategy(value: String): this.type =
     set(featureSubsetStrategy, value)
 
   // Parameters from GBTClassifierParams:

mllib/src/main/scala/org/apache/spark/ml/classification/RandomForestClassifier.scala

@@ -57,27 +57,27 @@ class RandomForestClassifier @Since("1.4.0") (
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setMaxDepth(value: Int): this.type = set(maxDepth, value)
+  def setMaxDepth(value: Int): this.type = set(maxDepth, value)
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setMaxBins(value: Int): this.type = set(maxBins, value)
+  def setMaxBins(value: Int): this.type = set(maxBins, value)
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setMinInstancesPerNode(value: Int): this.type = set(minInstancesPerNode, value)
+  def setMinInstancesPerNode(value: Int): this.type = set(minInstancesPerNode, value)
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setMinInfoGain(value: Double): this.type = set(minInfoGain, value)
+  def setMinInfoGain(value: Double): this.type = set(minInfoGain, value)
 
   /** @group expertSetParam */
   @Since("1.4.0")
-  override def setMaxMemoryInMB(value: Int): this.type = set(maxMemoryInMB, value)
+  def setMaxMemoryInMB(value: Int): this.type = set(maxMemoryInMB, value)
 
   /** @group expertSetParam */
   @Since("1.4.0")
-  override def setCacheNodeIds(value: Boolean): this.type = set(cacheNodeIds, value)
+  def setCacheNodeIds(value: Boolean): this.type = set(cacheNodeIds, value)
 
   /**
    * Specifies how often to checkpoint the cached node IDs.
@@ -89,31 +89,31 @@ class RandomForestClassifier @Since("1.4.0") (
    * @group setParam
    */
   @Since("1.4.0")
-  override def setCheckpointInterval(value: Int): this.type = set(checkpointInterval, value)
+  def setCheckpointInterval(value: Int): this.type = set(checkpointInterval, value)
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setImpurity(value: String): this.type = set(impurity, value)
+  def setImpurity(value: String): this.type = set(impurity, value)
 
   // Parameters from TreeEnsembleParams:
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setSubsamplingRate(value: Double): this.type = set(subsamplingRate, value)
+  def setSubsamplingRate(value: Double): this.type = set(subsamplingRate, value)
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setSeed(value: Long): this.type = set(seed, value)
+  def setSeed(value: Long): this.type = set(seed, value)
 
   // Parameters from RandomForestParams:
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setNumTrees(value: Int): this.type = set(numTrees, value)
+  def setNumTrees(value: Int): this.type = set(numTrees, value)
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setFeatureSubsetStrategy(value: String): this.type =
+  def setFeatureSubsetStrategy(value: String): this.type =
     set(featureSubsetStrategy, value)
 
   override protected def train(

mllib/src/main/scala/org/apache/spark/ml/regression/DecisionTreeRegressor.scala

@@ -54,27 +54,27 @@ class DecisionTreeRegressor @Since("1.4.0") (@Since("1.4.0") override val uid: S
   // Override parameter setters from parent trait for Java API compatibility.
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setMaxDepth(value: Int): this.type = set(maxDepth, value)
+  def setMaxDepth(value: Int): this.type = set(maxDepth, value)
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setMaxBins(value: Int): this.type = set(maxBins, value)
+  def setMaxBins(value: Int): this.type = set(maxBins, value)
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setMinInstancesPerNode(value: Int): this.type = set(minInstancesPerNode, value)
+  def setMinInstancesPerNode(value: Int): this.type = set(minInstancesPerNode, value)
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setMinInfoGain(value: Double): this.type = set(minInfoGain, value)
+  def setMinInfoGain(value: Double): this.type = set(minInfoGain, value)
 
   /** @group expertSetParam */
   @Since("1.4.0")
-  override def setMaxMemoryInMB(value: Int): this.type = set(maxMemoryInMB, value)
+  def setMaxMemoryInMB(value: Int): this.type = set(maxMemoryInMB, value)
 
   /** @group expertSetParam */
   @Since("1.4.0")
-  override def setCacheNodeIds(value: Boolean): this.type = set(cacheNodeIds, value)
+  def setCacheNodeIds(value: Boolean): this.type = set(cacheNodeIds, value)
 
   /**
    * Specifies how often to checkpoint the cached node IDs.
@@ -86,15 +86,15 @@ class DecisionTreeRegressor @Since("1.4.0") (@Since("1.4.0") override val uid: S
    * @group setParam
    */
   @Since("1.4.0")
-  override def setCheckpointInterval(value: Int): this.type = set(checkpointInterval, value)
+  def setCheckpointInterval(value: Int): this.type = set(checkpointInterval, value)
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setImpurity(value: String): this.type = set(impurity, value)
+  def setImpurity(value: String): this.type = set(impurity, value)
 
   /** @group setParam */
   @Since("1.6.0")
-  override def setSeed(value: Long): this.type = set(seed, value)
+  def setSeed(value: Long): this.type = set(seed, value)
 
   /** @group setParam */
   @Since("2.0.0")

mllib/src/main/scala/org/apache/spark/ml/regression/GBTRegressor.scala

@@ -34,7 +34,6 @@ import org.apache.spark.ml.util.DefaultParamsReader.Metadata
 import org.apache.spark.ml.util.Instrumentation.instrumented
 import org.apache.spark.mllib.tree.configuration.{Algo => OldAlgo}
 import org.apache.spark.mllib.tree.model.{GradientBoostedTreesModel => OldGBTModel}
-import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.{DataFrame, Dataset, Row}
 import org.apache.spark.sql.functions._
@@ -69,27 +68,27 @@ class GBTRegressor @Since("1.4.0") (@Since("1.4.0") override val uid: String)
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setMaxDepth(value: Int): this.type = set(maxDepth, value)
+  def setMaxDepth(value: Int): this.type = set(maxDepth, value)
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setMaxBins(value: Int): this.type = set(maxBins, value)
+  def setMaxBins(value: Int): this.type = set(maxBins, value)
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setMinInstancesPerNode(value: Int): this.type = set(minInstancesPerNode, value)
+  def setMinInstancesPerNode(value: Int): this.type = set(minInstancesPerNode, value)
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setMinInfoGain(value: Double): this.type = set(minInfoGain, value)
+  def setMinInfoGain(value: Double): this.type = set(minInfoGain, value)
 
   /** @group expertSetParam */
   @Since("1.4.0")
-  override def setMaxMemoryInMB(value: Int): this.type = set(maxMemoryInMB, value)
+  def setMaxMemoryInMB(value: Int): this.type = set(maxMemoryInMB, value)
 
   /** @group expertSetParam */
   @Since("1.4.0")
-  override def setCacheNodeIds(value: Boolean): this.type = set(cacheNodeIds, value)
+  def setCacheNodeIds(value: Boolean): this.type = set(cacheNodeIds, value)
 
   /**
    * Specifies how often to checkpoint the cached node IDs.
@@ -101,7 +100,7 @@ class GBTRegressor @Since("1.4.0") (@Since("1.4.0") override val uid: String)
    * @group setParam
    */
   @Since("1.4.0")
-  override def setCheckpointInterval(value: Int): this.type = set(checkpointInterval, value)
+  def setCheckpointInterval(value: Int): this.type = set(checkpointInterval, value)
 
   /**
    * The impurity setting is ignored for GBT models.
@@ -110,7 +109,7 @@ class GBTRegressor @Since("1.4.0") (@Since("1.4.0") override val uid: String)
    * @group setParam
    */
   @Since("1.4.0")
-  override def setImpurity(value: String): this.type = {
+  def setImpurity(value: String): this.type = {
     logWarning("GBTRegressor.setImpurity should NOT be used")
     this
   }
@@ -119,21 +118,21 @@ class GBTRegressor @Since("1.4.0") (@Since("1.4.0") override val uid: String)
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setSubsamplingRate(value: Double): this.type = set(subsamplingRate, value)
+  def setSubsamplingRate(value: Double): this.type = set(subsamplingRate, value)
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setSeed(value: Long): this.type = set(seed, value)
+  def setSeed(value: Long): this.type = set(seed, value)
 
   // Parameters from GBTParams:
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setMaxIter(value: Int): this.type = set(maxIter, value)
+  def setMaxIter(value: Int): this.type = set(maxIter, value)
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setStepSize(value: Double): this.type = set(stepSize, value)
+  def setStepSize(value: Double): this.type = set(stepSize, value)
 
   // Parameters from GBTRegressorParams:
@@ -143,7 +142,7 @@ class GBTRegressor @Since("1.4.0") (@Since("1.4.0") override val uid: String)
 
   /** @group setParam */
   @Since("2.3.0")
-  override def setFeatureSubsetStrategy(value: String): this.type =
+  def setFeatureSubsetStrategy(value: String): this.type =
     set(featureSubsetStrategy, value)
 
   /** @group setParam */

mllib/src/main/scala/org/apache/spark/ml/regression/RandomForestRegressor.scala

@@ -56,27 +56,27 @@ class RandomForestRegressor @Since("1.4.0") (@Since("1.4.0") override val uid: S
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setMaxDepth(value: Int): this.type = set(maxDepth, value)
+  def setMaxDepth(value: Int): this.type = set(maxDepth, value)
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setMaxBins(value: Int): this.type = set(maxBins, value)
+  def setMaxBins(value: Int): this.type = set(maxBins, value)
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setMinInstancesPerNode(value: Int): this.type = set(minInstancesPerNode, value)
+  def setMinInstancesPerNode(value: Int): this.type = set(minInstancesPerNode, value)
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setMinInfoGain(value: Double): this.type = set(minInfoGain, value)
+  def setMinInfoGain(value: Double): this.type = set(minInfoGain, value)
 
   /** @group expertSetParam */
   @Since("1.4.0")
-  override def setMaxMemoryInMB(value: Int): this.type = set(maxMemoryInMB, value)
+  def setMaxMemoryInMB(value: Int): this.type = set(maxMemoryInMB, value)
 
   /** @group expertSetParam */
   @Since("1.4.0")
-  override def setCacheNodeIds(value: Boolean): this.type = set(cacheNodeIds, value)
+  def setCacheNodeIds(value: Boolean): this.type = set(cacheNodeIds, value)
 
   /**
    * Specifies how often to checkpoint the cached node IDs.
@@ -88,31 +88,31 @@ class RandomForestRegressor @Since("1.4.0") (@Since("1.4.0") override val uid: S
    * @group setParam
    */
   @Since("1.4.0")
-  override def setCheckpointInterval(value: Int): this.type = set(checkpointInterval, value)
+  def setCheckpointInterval(value: Int): this.type = set(checkpointInterval, value)
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setImpurity(value: String): this.type = set(impurity, value)
+  def setImpurity(value: String): this.type = set(impurity, value)
 
   // Parameters from TreeEnsembleParams:
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setSubsamplingRate(value: Double): this.type = set(subsamplingRate, value)
+  def setSubsamplingRate(value: Double): this.type = set(subsamplingRate, value)
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setSeed(value: Long): this.type = set(seed, value)
+  def setSeed(value: Long): this.type = set(seed, value)
 
   // Parameters from RandomForestParams:
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setNumTrees(value: Int): this.type = set(numTrees, value)
+  def setNumTrees(value: Int): this.type = set(numTrees, value)
 
   /** @group setParam */
   @Since("1.4.0")
-  override def setFeatureSubsetStrategy(value: String): this.type =
+  def setFeatureSubsetStrategy(value: String): this.type =
     set(featureSubsetStrategy, value)
 
   override protected def train(

mllib/src/main/scala/org/apache/spark/ml/tree/treeParams.scala

@@ -110,80 +110,24 @@ private[ml] trait DecisionTreeParams extends PredictorParams
 
   setDefault(maxDepth -> 5, maxBins -> 32, minInstancesPerNode -> 1, minInfoGain -> 0.0,
     maxMemoryInMB -> 256, cacheNodeIds -> false, checkpointInterval -> 10)
 
-  /**
-   * @deprecated This method is deprecated and will be removed in 3.0.0.
-   * @group setParam
-   */
-  @deprecated("This method is deprecated and will be removed in 3.0.0.", "2.1.0")
-  def setMaxDepth(value: Int): this.type = set(maxDepth, value)
-
   /** @group getParam */
   final def getMaxDepth: Int = $(maxDepth)
 
-  /**
-   * @deprecated This method is deprecated and will be removed in 3.0.0.
-   * @group setParam
-   */
-  @deprecated("This method is deprecated and will be removed in 3.0.0.", "2.1.0")
-  def setMaxBins(value: Int): this.type = set(maxBins, value)
-
   /** @group getParam */
   final def getMaxBins: Int = $(maxBins)
 
-  /**
-   * @deprecated This method is deprecated and will be removed in 3.0.0.
-   * @group setParam
-   */
-  @deprecated("This method is deprecated and will be removed in 3.0.0.", "2.1.0")
-  def setMinInstancesPerNode(value: Int): this.type = set(minInstancesPerNode, value)
-
   /** @group getParam */
   final def getMinInstancesPerNode: Int = $(minInstancesPerNode)
 
-  /**
-   * @deprecated This method is deprecated and will be removed in 3.0.0.
-   * @group setParam
-   */
-  @deprecated("This method is deprecated and will be removed in 3.0.0.", "2.1.0")
-  def setMinInfoGain(value: Double): this.type = set(minInfoGain, value)
-
   /** @group getParam */
   final def getMinInfoGain: Double = $(minInfoGain)
 
-  /**
-   * @deprecated This method is deprecated and will be removed in 3.0.0.
-   * @group setParam
-   */
-  @deprecated("This method is deprecated and will be removed in 3.0.0.", "2.1.0")
-  def setSeed(value: Long): this.type = set(seed, value)
-
-  /**
-   * @deprecated This method is deprecated and will be removed in 3.0.0.
-   * @group expertSetParam
-   */
-  @deprecated("This method is deprecated and will be removed in 3.0.0.", "2.1.0")
-  def setMaxMemoryInMB(value: Int): this.type = set(maxMemoryInMB, value)
-
   /** @group expertGetParam */
   final def getMaxMemoryInMB: Int = $(maxMemoryInMB)
 
-  /**
-   * @deprecated This method is deprecated and will be removed in 3.0.0.
-   * @group expertSetParam
-   */
-  @deprecated("This method is deprecated and will be removed in 3.0.0.", "2.1.0")
-  def setCacheNodeIds(value: Boolean): this.type = set(cacheNodeIds, value)
-
   /** @group expertGetParam */
   final def getCacheNodeIds: Boolean = $(cacheNodeIds)
 
-  /**
-   * @deprecated This method is deprecated and will be removed in 3.0.0.
-   * @group setParam
-   */
-  @deprecated("This method is deprecated and will be removed in 3.0.0.", "2.1.0")
-  def setCheckpointInterval(value: Int): this.type = set(checkpointInterval, value)
-
   /** (private[ml]) Create a Strategy instance to use with the old API. */
   private[ml] def getOldStrategy(
       categoricalFeatures: Map[Int, Int],
@@ -226,13 +170,6 @@ private[ml] trait TreeClassifierParams extends Params {
 
   setDefault(impurity -> "gini")
 
-  /**
-   * @deprecated This method is deprecated and will be removed in 3.0.0.
-   * @group setParam
-   */
-  @deprecated("This method is deprecated and will be removed in 3.0.0.", "2.1.0")
-  def setImpurity(value: String): this.type = set(impurity, value)
-
   /** @group getParam */
   final def getImpurity: String = $(impurity).toLowerCase(Locale.ROOT)
@@ -273,13 +210,6 @@ private[ml] trait HasVarianceImpurity extends Params {
 
   setDefault(impurity -> "variance")
 
-  /**
-   * @deprecated This method is deprecated and will be removed in 3.0.0.
-   * @group setParam
-   */
-  @deprecated("This method is deprecated and will be removed in 3.0.0.", "2.1.0")
-  def setImpurity(value: String): this.type = set(impurity, value)
-
   /** @group getParam */
   final def getImpurity: String = $(impurity).toLowerCase(Locale.ROOT)
@@ -346,13 +276,6 @@ private[ml] trait TreeEnsembleParams extends DecisionTreeParams {
 
   setDefault(subsamplingRate -> 1.0)
 
-  /**
-   * @deprecated This method is deprecated and will be removed in 3.0.0.
-   * @group setParam
-   */
-  @deprecated("This method is deprecated and will be removed in 3.0.0.", "2.1.0")
-  def setSubsamplingRate(value: Double): this.type = set(subsamplingRate, value)
-
   /** @group getParam */
   final def getSubsamplingRate: Double = $(subsamplingRate)
@@ -406,13 +329,6 @@ private[ml] trait TreeEnsembleParams extends DecisionTreeParams {
 
   setDefault(featureSubsetStrategy -> "auto")
 
-  /**
-   * @deprecated This method is deprecated and will be removed in 3.0.0
-   * @group setParam
-   */
-  @deprecated("This method is deprecated and will be removed in 3.0.0.", "2.1.0")
-  def setFeatureSubsetStrategy(value: String): this.type = set(featureSubsetStrategy, value)
-
   /** @group getParam */
   final def getFeatureSubsetStrategy: String = $(featureSubsetStrategy).toLowerCase(Locale.ROOT)
 }
@@ -440,13 +356,6 @@ private[ml] trait RandomForestParams extends TreeEnsembleParams {
 
   setDefault(numTrees -> 20)
 
-  /**
-   * @deprecated This method is deprecated and will be removed in 3.0.0.
-   * @group setParam
-   */
-  @deprecated("This method is deprecated and will be removed in 3.0.0.", "2.1.0")
-  def setNumTrees(value: Int): this.type = set(numTrees, value)
-
   /** @group getParam */
   final def getNumTrees: Int = $(numTrees)
 }
@@ -491,13 +400,6 @@ private[ml] trait GBTParams extends TreeEnsembleParams with HasMaxIter with HasS
   @Since("2.4.0")
   final def getValidationTol: Double = $(validationTol)
 
-  /**
-   * @deprecated This method is deprecated and will be removed in 3.0.0.
-   * @group setParam
-   */
-  @deprecated("This method is deprecated and will be removed in 3.0.0.", "2.1.0")
-  def setMaxIter(value: Int): this.type = set(maxIter, value)
-
   /**
    * Param for Step size (a.k.a. learning rate) in interval (0, 1] for shrinking
    * the contribution of each estimator.
@@ -508,13 +410,6 @@ private[ml] trait GBTParams extends TreeEnsembleParams with HasMaxIter with HasS
     "(a.k.a. learning rate) in interval (0, 1] for shrinking the contribution of each estimator.",
     ParamValidators.inRange(0, 1, lowerInclusive = false, upperInclusive = true))
 
-  /**
-   * @deprecated This method is deprecated and will be removed in 3.0.0.
-   * @group setParam
-   */
-  @deprecated("This method is deprecated and will be removed in 3.0.0.", "2.1.0")
-  def setStepSize(value: Double): this.type = set(stepSize, value)
-
   setDefault(maxIter -> 20, stepSize -> 0.1, validationTol -> 0.01)
 
   setDefault(featureSubsetStrategy -> "all")
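After this change, the shared `tree` traits above keep only the `Param` definitions and their getters, and each concrete estimator declares its own setter, which is why the `override` modifiers are dropped in the class diffs. A minimal, self-contained sketch of that pattern follows; `MyTreeParams` and `MyTree` are hypothetical names for illustration, not Spark classes:

```scala
import org.apache.spark.ml.param.{IntParam, ParamMap, Params}
import org.apache.spark.ml.util.Identifiable

// Hypothetical shared trait: holds only the Param and its getter, no setter.
trait MyTreeParams extends Params {
  final val maxDepth: IntParam =
    new IntParam(this, "maxDepth", "Maximum depth of the tree (>= 0)")

  /** @group getParam */
  final def getMaxDepth: Int = $(maxDepth)
}

// Hypothetical concrete class: declares the setter itself, returning
// this.type so calls can be chained fluently (no `override` needed, since
// the trait no longer defines a setter to override).
class MyTree(override val uid: String) extends MyTreeParams {
  def this() = this(Identifiable.randomUID("myTree"))

  /** @group setParam */
  def setMaxDepth(value: Int): this.type = set(maxDepth, value)

  override def copy(extra: ParamMap): MyTree = defaultCopy(extra)
}
```

This keeps the Java-friendly concrete setters on the estimators while removing the deprecated trait-level ones that models used to inherit.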

project/MimaExcludes.scala

@@ -36,6 +36,76 @@ object MimaExcludes {
 
   // Exclude rules for 3.0.x
   lazy val v30excludes = v24excludes ++ Seq(
+    // [SPARK-26127] Remove deprecated setters from tree regression and classification models
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.classification.DecisionTreeClassificationModel.setSeed"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.classification.DecisionTreeClassificationModel.setMinInfoGain"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.classification.DecisionTreeClassificationModel.setCacheNodeIds"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.classification.DecisionTreeClassificationModel.setCheckpointInterval"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.classification.DecisionTreeClassificationModel.setMaxDepth"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.classification.DecisionTreeClassificationModel.setImpurity"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.classification.DecisionTreeClassificationModel.setMaxMemoryInMB"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.classification.DecisionTreeClassificationModel.setMaxBins"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.classification.DecisionTreeClassificationModel.setMinInstancesPerNode"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.classification.GBTClassificationModel.setSeed"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.classification.GBTClassificationModel.setMinInfoGain"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.classification.GBTClassificationModel.setSubsamplingRate"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.classification.GBTClassificationModel.setMaxIter"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.classification.GBTClassificationModel.setCacheNodeIds"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.classification.GBTClassificationModel.setCheckpointInterval"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.classification.GBTClassificationModel.setMaxDepth"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.classification.GBTClassificationModel.setImpurity"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.classification.GBTClassificationModel.setMaxMemoryInMB"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.classification.GBTClassificationModel.setStepSize"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.classification.GBTClassificationModel.setMaxBins"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.classification.GBTClassificationModel.setMinInstancesPerNode"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.classification.GBTClassificationModel.setFeatureSubsetStrategy"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.classification.RandomForestClassificationModel.setSeed"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.classification.RandomForestClassificationModel.setMinInfoGain"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.classification.RandomForestClassificationModel.setSubsamplingRate"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.classification.RandomForestClassificationModel.setCacheNodeIds"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.classification.RandomForestClassificationModel.setCheckpointInterval"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.classification.RandomForestClassificationModel.setMaxDepth"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.classification.RandomForestClassificationModel.setImpurity"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.classification.RandomForestClassificationModel.setMaxMemoryInMB"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.classification.RandomForestClassificationModel.setFeatureSubsetStrategy"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.classification.RandomForestClassificationModel.setMaxBins"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.classification.RandomForestClassificationModel.setMinInstancesPerNode"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.classification.RandomForestClassificationModel.setNumTrees"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.regression.DecisionTreeRegressionModel.setSeed"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.regression.DecisionTreeRegressionModel.setMinInfoGain"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.regression.DecisionTreeRegressionModel.setCacheNodeIds"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.regression.DecisionTreeRegressionModel.setCheckpointInterval"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.regression.DecisionTreeRegressionModel.setMaxDepth"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.regression.DecisionTreeRegressionModel.setImpurity"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.regression.DecisionTreeRegressionModel.setMaxMemoryInMB"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.regression.DecisionTreeRegressionModel.setMaxBins"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.regression.DecisionTreeRegressionModel.setMinInstancesPerNode"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.regression.GBTRegressionModel.setSeed"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.regression.GBTRegressionModel.setMinInfoGain"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.regression.GBTRegressionModel.setSubsamplingRate"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.regression.GBTRegressionModel.setMaxIter"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.regression.GBTRegressionModel.setCacheNodeIds"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.regression.GBTRegressionModel.setCheckpointInterval"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.regression.GBTRegressionModel.setMaxDepth"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.regression.GBTRegressionModel.setImpurity"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.regression.GBTRegressionModel.setMaxMemoryInMB"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.regression.GBTRegressionModel.setStepSize"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.regression.GBTRegressionModel.setMaxBins"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.regression.GBTRegressionModel.setMinInstancesPerNode"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.regression.GBTRegressionModel.setFeatureSubsetStrategy"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.regression.RandomForestRegressionModel.setSeed"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.regression.RandomForestRegressionModel.setMinInfoGain"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.regression.RandomForestRegressionModel.setSubsamplingRate"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.regression.RandomForestRegressionModel.setCacheNodeIds"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.regression.RandomForestRegressionModel.setCheckpointInterval"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.regression.RandomForestRegressionModel.setMaxDepth"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.regression.RandomForestRegressionModel.setImpurity"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.regression.RandomForestRegressionModel.setMaxMemoryInMB"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.regression.RandomForestRegressionModel.setFeatureSubsetStrategy"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.regression.RandomForestRegressionModel.setMaxBins"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.regression.RandomForestRegressionModel.setMinInstancesPerNode"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.regression.RandomForestRegressionModel.setNumTrees"),
+
     // [SPARK-26124] Update plugins, including MiMa
     ProblemFilters.exclude[InheritedNewAbstractMethodProblem]("org.apache.spark.sql.sources.v2.reader.SupportsPushDownRequiredColumns.build"),
     ProblemFilters.exclude[InheritedNewAbstractMethodProblem]("org.apache.spark.sql.sources.v2.reader.SupportsReportStatistics.fullSchema"),
@@ -50,15 +120,11 @@ object MimaExcludes {
     ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.ml.feature.LabeledPointBeanInfo"),
 
     // [SPARK-25959] GBTClassifier picks wrong impurity stats on loading
     ProblemFilters.exclude[IncompatibleResultTypeProblem]("org.apache.spark.ml.classification.GBTClassificationModel.setImpurity"),
     ProblemFilters.exclude[InheritedNewAbstractMethodProblem]("org.apache.spark.ml.tree.HasVarianceImpurity.org$apache$spark$ml$tree$HasVarianceImpurity$_setter_$impurity_="),
-    ProblemFilters.exclude[InheritedNewAbstractMethodProblem]("org.apache.spark.ml.tree.HasVarianceImpurity.org$apache$spark$ml$tree$HasVarianceImpurity$_setter_$impurity_="),
-    ProblemFilters.exclude[InheritedNewAbstractMethodProblem]("org.apache.spark.ml.tree.HasVarianceImpurity.org$apache$spark$ml$tree$HasVarianceImpurity$_setter_$impurity_="),
-    ProblemFilters.exclude[InheritedNewAbstractMethodProblem]("org.apache.spark.ml.tree.HasVarianceImpurity.org$apache$spark$ml$tree$HasVarianceImpurity$_setter_$impurity_="),
-    ProblemFilters.exclude[InheritedNewAbstractMethodProblem]("org.apache.spark.ml.tree.HasVarianceImpurity.org$apache$spark$ml$tree$HasVarianceImpurity$_setter_$impurity_="),
     ProblemFilters.exclude[IncompatibleResultTypeProblem]("org.apache.spark.ml.regression.DecisionTreeRegressionModel.setImpurity"),
     ProblemFilters.exclude[IncompatibleResultTypeProblem]("org.apache.spark.ml.regression.GBTRegressionModel.setImpurity"),
     ProblemFilters.exclude[IncompatibleResultTypeProblem]("org.apache.spark.ml.regression.RandomForestRegressionModel.setImpurity"),
 
     // [SPARK-25908][CORE][SQL] Remove old deprecated items in Spark 3
     ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.BarrierTaskContext.isRunningLocally"),
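For context on the MimaExcludes changes: removing a public method is a binary-incompatible change, so each removed setter needs an explicit exclusion or Spark's MiMa compatibility check (run via `dev/mima`) fails. A minimal sketch of one such rule in isolation, using a member actually removed by this PR (the import matches MiMa's API as used in project/MimaExcludes.scala):

```scala
import com.typesafe.tools.mima.core._

// Tell MiMa this public method is intentionally gone; without the filter,
// the missing member would be reported as a binary-compatibility error.
val rule = ProblemFilters.exclude[DirectMissingMethodProblem](
  "org.apache.spark.ml.classification.DecisionTreeClassificationModel.setMaxDepth")
```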