Skip to content
Snippets Groups Projects
Commit 95f79850 authored by Yanbo Liang's avatar Yanbo Liang Committed by Joseph K. Bradley
Browse files

[SPARK-18592][ML] Move DT/RF/GBT Param setter methods to subclasses

## What changes were proposed in this pull request?
Mainly two changes:
* Move DT/RF/GBT Param setter methods to subclasses.
* Deprecate corresponding setter methods in the model classes.

See discussion here https://github.com/apache/spark/pull/15913#discussion_r89662469.

## How was this patch tested?
Existing tests.

Author: Yanbo Liang <ybliang8@gmail.com>

Closes #16017 from yanboliang/spark-18592.
parent 1a870090
No related branches found
No related tags found
No related merge requests found
......@@ -52,33 +52,49 @@ class DecisionTreeClassifier @Since("1.4.0") (
// Override parameter setters from parent trait for Java API compatibility.
/** @group setParam */
@Since("1.4.0")
override def setMaxDepth(value: Int): this.type = super.setMaxDepth(value)
override def setMaxDepth(value: Int): this.type = set(maxDepth, value)
/** @group setParam */
@Since("1.4.0")
override def setMaxBins(value: Int): this.type = super.setMaxBins(value)
override def setMaxBins(value: Int): this.type = set(maxBins, value)
/** @group setParam */
@Since("1.4.0")
override def setMinInstancesPerNode(value: Int): this.type =
super.setMinInstancesPerNode(value)
override def setMinInstancesPerNode(value: Int): this.type = set(minInstancesPerNode, value)
/** @group setParam */
@Since("1.4.0")
override def setMinInfoGain(value: Double): this.type = super.setMinInfoGain(value)
override def setMinInfoGain(value: Double): this.type = set(minInfoGain, value)
/** @group expertSetParam */
@Since("1.4.0")
override def setMaxMemoryInMB(value: Int): this.type = super.setMaxMemoryInMB(value)
override def setMaxMemoryInMB(value: Int): this.type = set(maxMemoryInMB, value)
/** @group expertSetParam */
@Since("1.4.0")
override def setCacheNodeIds(value: Boolean): this.type = super.setCacheNodeIds(value)
override def setCacheNodeIds(value: Boolean): this.type = set(cacheNodeIds, value)
/**
* Specifies how often to checkpoint the cached node IDs.
* E.g. 10 means that the cache will get checkpointed every 10 iterations.
* This is only used if cacheNodeIds is true and if the checkpoint directory is set in
* [[org.apache.spark.SparkContext]].
* Must be >= 1.
* (default = 10)
* @group setParam
*/
@Since("1.4.0")
override def setCheckpointInterval(value: Int): this.type = super.setCheckpointInterval(value)
override def setCheckpointInterval(value: Int): this.type = set(checkpointInterval, value)
/** @group setParam */
@Since("1.4.0")
override def setImpurity(value: String): this.type = super.setImpurity(value)
override def setImpurity(value: String): this.type = set(impurity, value)
/** @group setParam */
@Since("1.6.0")
override def setSeed(value: Long): this.type = super.setSeed(value)
override def setSeed(value: Long): this.type = set(seed, value)
override protected def train(dataset: Dataset[_]): DecisionTreeClassificationModel = {
val categoricalFeatures: Map[Int, Int] =
......
......@@ -69,31 +69,47 @@ class GBTClassifier @Since("1.4.0") (
// Parameters from TreeClassifierParams:
/** @group setParam */
@Since("1.4.0")
override def setMaxDepth(value: Int): this.type = super.setMaxDepth(value)
override def setMaxDepth(value: Int): this.type = set(maxDepth, value)
/** @group setParam */
@Since("1.4.0")
override def setMaxBins(value: Int): this.type = super.setMaxBins(value)
override def setMaxBins(value: Int): this.type = set(maxBins, value)
/** @group setParam */
@Since("1.4.0")
override def setMinInstancesPerNode(value: Int): this.type =
super.setMinInstancesPerNode(value)
override def setMinInstancesPerNode(value: Int): this.type = set(minInstancesPerNode, value)
/** @group setParam */
@Since("1.4.0")
override def setMinInfoGain(value: Double): this.type = super.setMinInfoGain(value)
override def setMinInfoGain(value: Double): this.type = set(minInfoGain, value)
/** @group expertSetParam */
@Since("1.4.0")
override def setMaxMemoryInMB(value: Int): this.type = super.setMaxMemoryInMB(value)
override def setMaxMemoryInMB(value: Int): this.type = set(maxMemoryInMB, value)
/** @group expertSetParam */
@Since("1.4.0")
override def setCacheNodeIds(value: Boolean): this.type = super.setCacheNodeIds(value)
override def setCacheNodeIds(value: Boolean): this.type = set(cacheNodeIds, value)
/**
* Specifies how often to checkpoint the cached node IDs.
* E.g. 10 means that the cache will get checkpointed every 10 iterations.
* This is only used if cacheNodeIds is true and if the checkpoint directory is set in
* [[org.apache.spark.SparkContext]].
* Must be >= 1.
* (default = 10)
* @group setParam
*/
@Since("1.4.0")
override def setCheckpointInterval(value: Int): this.type = super.setCheckpointInterval(value)
override def setCheckpointInterval(value: Int): this.type = set(checkpointInterval, value)
/**
* The impurity setting is ignored for GBT models.
* Individual trees are built using impurity "Variance."
*
* @group setParam
*/
@Since("1.4.0")
override def setImpurity(value: String): this.type = {
......@@ -103,19 +119,23 @@ class GBTClassifier @Since("1.4.0") (
// Parameters from TreeEnsembleParams:
/** @group setParam */
@Since("1.4.0")
override def setSubsamplingRate(value: Double): this.type = super.setSubsamplingRate(value)
override def setSubsamplingRate(value: Double): this.type = set(subsamplingRate, value)
/** @group setParam */
@Since("1.4.0")
override def setSeed(value: Long): this.type = super.setSeed(value)
override def setSeed(value: Long): this.type = set(seed, value)
// Parameters from GBTParams:
/** @group setParam */
@Since("1.4.0")
override def setMaxIter(value: Int): this.type = super.setMaxIter(value)
override def setMaxIter(value: Int): this.type = set(maxIter, value)
/** @group setParam */
@Since("1.4.0")
override def setStepSize(value: Double): this.type = super.setStepSize(value)
override def setStepSize(value: Double): this.type = set(stepSize, value)
// Parameters from GBTClassifierParams:
......
......@@ -54,47 +54,66 @@ class RandomForestClassifier @Since("1.4.0") (
// Parameters from TreeClassifierParams:
/** @group setParam */
@Since("1.4.0")
override def setMaxDepth(value: Int): this.type = super.setMaxDepth(value)
override def setMaxDepth(value: Int): this.type = set(maxDepth, value)
/** @group setParam */
@Since("1.4.0")
override def setMaxBins(value: Int): this.type = super.setMaxBins(value)
override def setMaxBins(value: Int): this.type = set(maxBins, value)
/** @group setParam */
@Since("1.4.0")
override def setMinInstancesPerNode(value: Int): this.type =
super.setMinInstancesPerNode(value)
override def setMinInstancesPerNode(value: Int): this.type = set(minInstancesPerNode, value)
/** @group setParam */
@Since("1.4.0")
override def setMinInfoGain(value: Double): this.type = super.setMinInfoGain(value)
override def setMinInfoGain(value: Double): this.type = set(minInfoGain, value)
/** @group expertSetParam */
@Since("1.4.0")
override def setMaxMemoryInMB(value: Int): this.type = super.setMaxMemoryInMB(value)
override def setMaxMemoryInMB(value: Int): this.type = set(maxMemoryInMB, value)
/** @group expertSetParam */
@Since("1.4.0")
override def setCacheNodeIds(value: Boolean): this.type = super.setCacheNodeIds(value)
override def setCacheNodeIds(value: Boolean): this.type = set(cacheNodeIds, value)
/**
* Specifies how often to checkpoint the cached node IDs.
* E.g. 10 means that the cache will get checkpointed every 10 iterations.
* This is only used if cacheNodeIds is true and if the checkpoint directory is set in
* [[org.apache.spark.SparkContext]].
* Must be >= 1.
* (default = 10)
* @group setParam
*/
@Since("1.4.0")
override def setCheckpointInterval(value: Int): this.type = super.setCheckpointInterval(value)
override def setCheckpointInterval(value: Int): this.type = set(checkpointInterval, value)
/** @group setParam */
@Since("1.4.0")
override def setImpurity(value: String): this.type = super.setImpurity(value)
override def setImpurity(value: String): this.type = set(impurity, value)
// Parameters from TreeEnsembleParams:
/** @group setParam */
@Since("1.4.0")
override def setSubsamplingRate(value: Double): this.type = super.setSubsamplingRate(value)
override def setSubsamplingRate(value: Double): this.type = set(subsamplingRate, value)
/** @group setParam */
@Since("1.4.0")
override def setSeed(value: Long): this.type = super.setSeed(value)
override def setSeed(value: Long): this.type = set(seed, value)
// Parameters from RandomForestParams:
/** @group setParam */
@Since("1.4.0")
override def setNumTrees(value: Int): this.type = super.setNumTrees(value)
override def setNumTrees(value: Int): this.type = set(numTrees, value)
/** @group setParam */
@Since("1.4.0")
override def setFeatureSubsetStrategy(value: String): this.type =
super.setFeatureSubsetStrategy(value)
set(featureSubsetStrategy, value)
override protected def train(dataset: Dataset[_]): RandomForestClassificationModel = {
val categoricalFeatures: Map[Int, Int] =
......
......@@ -51,34 +51,52 @@ class DecisionTreeRegressor @Since("1.4.0") (@Since("1.4.0") override val uid: S
def this() = this(Identifiable.randomUID("dtr"))
// Override parameter setters from parent trait for Java API compatibility.
/** @group setParam */
@Since("1.4.0")
override def setMaxDepth(value: Int): this.type = super.setMaxDepth(value)
override def setMaxDepth(value: Int): this.type = set(maxDepth, value)
/** @group setParam */
@Since("1.4.0")
override def setMaxBins(value: Int): this.type = super.setMaxBins(value)
override def setMaxBins(value: Int): this.type = set(maxBins, value)
/** @group setParam */
@Since("1.4.0")
override def setMinInstancesPerNode(value: Int): this.type =
super.setMinInstancesPerNode(value)
override def setMinInstancesPerNode(value: Int): this.type = set(minInstancesPerNode, value)
/** @group setParam */
@Since("1.4.0")
override def setMinInfoGain(value: Double): this.type = super.setMinInfoGain(value)
override def setMinInfoGain(value: Double): this.type = set(minInfoGain, value)
/** @group expertSetParam */
@Since("1.4.0")
override def setMaxMemoryInMB(value: Int): this.type = super.setMaxMemoryInMB(value)
override def setMaxMemoryInMB(value: Int): this.type = set(maxMemoryInMB, value)
/** @group expertSetParam */
@Since("1.4.0")
override def setCacheNodeIds(value: Boolean): this.type = super.setCacheNodeIds(value)
override def setCacheNodeIds(value: Boolean): this.type = set(cacheNodeIds, value)
/**
* Specifies how often to checkpoint the cached node IDs.
* E.g. 10 means that the cache will get checkpointed every 10 iterations.
* This is only used if cacheNodeIds is true and if the checkpoint directory is set in
* [[org.apache.spark.SparkContext]].
* Must be >= 1.
* (default = 10)
* @group setParam
*/
@Since("1.4.0")
override def setCheckpointInterval(value: Int): this.type = super.setCheckpointInterval(value)
override def setCheckpointInterval(value: Int): this.type = set(checkpointInterval, value)
/** @group setParam */
@Since("1.4.0")
override def setImpurity(value: String): this.type = super.setImpurity(value)
override def setImpurity(value: String): this.type = set(impurity, value)
override def setSeed(value: Long): this.type = super.setSeed(value)
/** @group setParam */
@Since("1.6.0")
override def setSeed(value: Long): this.type = set(seed, value)
/** @group setParam */
@Since("2.0.0")
def setVarianceCol(value: String): this.type = set(varianceCol, value)
override protected def train(dataset: Dataset[_]): DecisionTreeRegressionModel = {
......
......@@ -65,31 +65,48 @@ class GBTRegressor @Since("1.4.0") (@Since("1.4.0") override val uid: String)
// Override parameter setters from parent trait for Java API compatibility.
// Parameters from TreeRegressorParams:
/** @group setParam */
@Since("1.4.0")
override def setMaxDepth(value: Int): this.type = super.setMaxDepth(value)
override def setMaxDepth(value: Int): this.type = set(maxDepth, value)
/** @group setParam */
@Since("1.4.0")
override def setMaxBins(value: Int): this.type = super.setMaxBins(value)
override def setMaxBins(value: Int): this.type = set(maxBins, value)
/** @group setParam */
@Since("1.4.0")
override def setMinInstancesPerNode(value: Int): this.type =
super.setMinInstancesPerNode(value)
override def setMinInstancesPerNode(value: Int): this.type = set(minInstancesPerNode, value)
/** @group setParam */
@Since("1.4.0")
override def setMinInfoGain(value: Double): this.type = super.setMinInfoGain(value)
override def setMinInfoGain(value: Double): this.type = set(minInfoGain, value)
/** @group expertSetParam */
@Since("1.4.0")
override def setMaxMemoryInMB(value: Int): this.type = super.setMaxMemoryInMB(value)
override def setMaxMemoryInMB(value: Int): this.type = set(maxMemoryInMB, value)
/** @group expertSetParam */
@Since("1.4.0")
override def setCacheNodeIds(value: Boolean): this.type = super.setCacheNodeIds(value)
override def setCacheNodeIds(value: Boolean): this.type = set(cacheNodeIds, value)
/**
* Specifies how often to checkpoint the cached node IDs.
* E.g. 10 means that the cache will get checkpointed every 10 iterations.
* This is only used if cacheNodeIds is true and if the checkpoint directory is set in
* [[org.apache.spark.SparkContext]].
* Must be >= 1.
* (default = 10)
* @group setParam
*/
@Since("1.4.0")
override def setCheckpointInterval(value: Int): this.type = super.setCheckpointInterval(value)
override def setCheckpointInterval(value: Int): this.type = set(checkpointInterval, value)
/**
* The impurity setting is ignored for GBT models.
* Individual trees are built using impurity "Variance."
*
* @group setParam
*/
@Since("1.4.0")
override def setImpurity(value: String): this.type = {
......@@ -98,18 +115,24 @@ class GBTRegressor @Since("1.4.0") (@Since("1.4.0") override val uid: String)
}
// Parameters from TreeEnsembleParams:
/** @group setParam */
@Since("1.4.0")
override def setSubsamplingRate(value: Double): this.type = super.setSubsamplingRate(value)
override def setSubsamplingRate(value: Double): this.type = set(subsamplingRate, value)
/** @group setParam */
@Since("1.4.0")
override def setSeed(value: Long): this.type = super.setSeed(value)
override def setSeed(value: Long): this.type = set(seed, value)
// Parameters from GBTParams:
/** @group setParam */
@Since("1.4.0")
override def setMaxIter(value: Int): this.type = super.setMaxIter(value)
override def setMaxIter(value: Int): this.type = set(maxIter, value)
/** @group setParam */
@Since("1.4.0")
override def setStepSize(value: Double): this.type = super.setStepSize(value)
override def setStepSize(value: Double): this.type = set(stepSize, value)
// Parameters from GBTRegressorParams:
......
......@@ -52,45 +52,67 @@ class RandomForestRegressor @Since("1.4.0") (@Since("1.4.0") override val uid: S
// Override parameter setters from parent trait for Java API compatibility.
// Parameters from TreeRegressorParams:
/** @group setParam */
@Since("1.4.0")
override def setMaxDepth(value: Int): this.type = super.setMaxDepth(value)
override def setMaxDepth(value: Int): this.type = set(maxDepth, value)
/** @group setParam */
@Since("1.4.0")
override def setMaxBins(value: Int): this.type = super.setMaxBins(value)
override def setMaxBins(value: Int): this.type = set(maxBins, value)
/** @group setParam */
@Since("1.4.0")
override def setMinInstancesPerNode(value: Int): this.type =
super.setMinInstancesPerNode(value)
override def setMinInstancesPerNode(value: Int): this.type = set(minInstancesPerNode, value)
/** @group setParam */
@Since("1.4.0")
override def setMinInfoGain(value: Double): this.type = super.setMinInfoGain(value)
override def setMinInfoGain(value: Double): this.type = set(minInfoGain, value)
/** @group expertSetParam */
@Since("1.4.0")
override def setMaxMemoryInMB(value: Int): this.type = super.setMaxMemoryInMB(value)
override def setMaxMemoryInMB(value: Int): this.type = set(maxMemoryInMB, value)
/** @group expertSetParam */
@Since("1.4.0")
override def setCacheNodeIds(value: Boolean): this.type = super.setCacheNodeIds(value)
override def setCacheNodeIds(value: Boolean): this.type = set(cacheNodeIds, value)
/**
* Specifies how often to checkpoint the cached node IDs.
* E.g. 10 means that the cache will get checkpointed every 10 iterations.
* This is only used if cacheNodeIds is true and if the checkpoint directory is set in
* [[org.apache.spark.SparkContext]].
* Must be >= 1.
* (default = 10)
* @group setParam
*/
@Since("1.4.0")
override def setCheckpointInterval(value: Int): this.type = super.setCheckpointInterval(value)
override def setCheckpointInterval(value: Int): this.type = set(checkpointInterval, value)
/** @group setParam */
@Since("1.4.0")
override def setImpurity(value: String): this.type = super.setImpurity(value)
override def setImpurity(value: String): this.type = set(impurity, value)
// Parameters from TreeEnsembleParams:
/** @group setParam */
@Since("1.4.0")
override def setSubsamplingRate(value: Double): this.type = super.setSubsamplingRate(value)
override def setSubsamplingRate(value: Double): this.type = set(subsamplingRate, value)
/** @group setParam */
@Since("1.4.0")
override def setSeed(value: Long): this.type = super.setSeed(value)
override def setSeed(value: Long): this.type = set(seed, value)
// Parameters from RandomForestParams:
/** @group setParam */
@Since("1.4.0")
override def setNumTrees(value: Int): this.type = super.setNumTrees(value)
override def setNumTrees(value: Int): this.type = set(numTrees, value)
/** @group setParam */
@Since("1.4.0")
override def setFeatureSubsetStrategy(value: String): this.type =
super.setFeatureSubsetStrategy(value)
set(featureSubsetStrategy, value)
override protected def train(dataset: Dataset[_]): RandomForestRegressionModel = {
val categoricalFeatures: Map[Int, Int] =
......
......@@ -107,54 +107,78 @@ private[ml] trait DecisionTreeParams extends PredictorParams
setDefault(maxDepth -> 5, maxBins -> 32, minInstancesPerNode -> 1, minInfoGain -> 0.0,
maxMemoryInMB -> 256, cacheNodeIds -> false, checkpointInterval -> 10)
/** @group setParam */
/**
* @deprecated This method is deprecated and will be removed in 2.2.0.
* @group setParam
*/
@deprecated("This method is deprecated and will be removed in 2.2.0.", "2.1.0")
def setMaxDepth(value: Int): this.type = set(maxDepth, value)
/** @group getParam */
final def getMaxDepth: Int = $(maxDepth)
/** @group setParam */
/**
* @deprecated This method is deprecated and will be removed in 2.2.0.
* @group setParam
*/
@deprecated("This method is deprecated and will be removed in 2.2.0.", "2.1.0")
def setMaxBins(value: Int): this.type = set(maxBins, value)
/** @group getParam */
final def getMaxBins: Int = $(maxBins)
/** @group setParam */
/**
* @deprecated This method is deprecated and will be removed in 2.2.0.
* @group setParam
*/
@deprecated("This method is deprecated and will be removed in 2.2.0.", "2.1.0")
def setMinInstancesPerNode(value: Int): this.type = set(minInstancesPerNode, value)
/** @group getParam */
final def getMinInstancesPerNode: Int = $(minInstancesPerNode)
/** @group setParam */
/**
* @deprecated This method is deprecated and will be removed in 2.2.0.
* @group setParam
*/
@deprecated("This method is deprecated and will be removed in 2.2.0.", "2.1.0")
def setMinInfoGain(value: Double): this.type = set(minInfoGain, value)
/** @group getParam */
final def getMinInfoGain: Double = $(minInfoGain)
/** @group setParam */
/**
* @deprecated This method is deprecated and will be removed in 2.2.0.
* @group setParam
*/
@deprecated("This method is deprecated and will be removed in 2.2.0.", "2.1.0")
def setSeed(value: Long): this.type = set(seed, value)
/** @group expertSetParam */
/**
* @deprecated This method is deprecated and will be removed in 2.2.0.
* @group expertSetParam
*/
@deprecated("This method is deprecated and will be removed in 2.2.0.", "2.1.0")
def setMaxMemoryInMB(value: Int): this.type = set(maxMemoryInMB, value)
/** @group expertGetParam */
final def getMaxMemoryInMB: Int = $(maxMemoryInMB)
/** @group expertSetParam */
/**
* @deprecated This method is deprecated and will be removed in 2.2.0.
* @group expertSetParam
*/
@deprecated("This method is deprecated and will be removed in 2.2.0.", "2.1.0")
def setCacheNodeIds(value: Boolean): this.type = set(cacheNodeIds, value)
/** @group expertGetParam */
final def getCacheNodeIds: Boolean = $(cacheNodeIds)
/**
* Specifies how often to checkpoint the cached node IDs.
* E.g. 10 means that the cache will get checkpointed every 10 iterations.
* This is only used if cacheNodeIds is true and if the checkpoint directory is set in
* [[org.apache.spark.SparkContext]].
* Must be >= 1.
* (default = 10)
* @deprecated This method is deprecated and will be removed in 2.2.0.
* @group setParam
*/
@deprecated("This method is deprecated and will be removed in 2.2.0.", "2.1.0")
def setCheckpointInterval(value: Int): this.type = set(checkpointInterval, value)
/** (private[ml]) Create a Strategy instance to use with the old API. */
......@@ -198,7 +222,11 @@ private[ml] trait TreeClassifierParams extends Params {
setDefault(impurity -> "gini")
/** @group setParam */
/**
* @deprecated This method is deprecated and will be removed in 2.2.0.
* @group setParam
*/
@deprecated("This method is deprecated and will be removed in 2.2.0.", "2.1.0")
def setImpurity(value: String): this.type = set(impurity, value)
/** @group getParam */
......@@ -243,7 +271,11 @@ private[ml] trait TreeRegressorParams extends Params {
setDefault(impurity -> "variance")
/** @group setParam */
/**
* @deprecated This method is deprecated and will be removed in 2.2.0.
* @group setParam
*/
@deprecated("This method is deprecated and will be removed in 2.2.0.", "2.1.0")
def setImpurity(value: String): this.type = set(impurity, value)
/** @group getParam */
......@@ -300,7 +332,11 @@ private[ml] trait TreeEnsembleParams extends DecisionTreeParams {
setDefault(subsamplingRate -> 1.0)
/** @group setParam */
/**
* @deprecated This method is deprecated and will be removed in 2.2.0.
* @group setParam
*/
@deprecated("This method is deprecated and will be removed in 2.2.0.", "2.1.0")
def setSubsamplingRate(value: Double): this.type = set(subsamplingRate, value)
/** @group getParam */
......@@ -340,7 +376,11 @@ private[ml] trait RandomForestParams extends TreeEnsembleParams {
setDefault(numTrees -> 20)
/** @group setParam */
/**
* @deprecated This method is deprecated and will be removed in 2.2.0.
* @group setParam
*/
@deprecated("This method is deprecated and will be removed in 2.2.0.", "2.1.0")
def setNumTrees(value: Int): this.type = set(numTrees, value)
/** @group getParam */
......@@ -383,7 +423,11 @@ private[ml] trait RandomForestParams extends TreeEnsembleParams {
setDefault(featureSubsetStrategy -> "auto")
/** @group setParam */
/**
* @deprecated This method is deprecated and will be removed in 2.2.0.
* @group setParam
*/
@deprecated("This method is deprecated and will be removed in 2.2.0.", "2.1.0")
def setFeatureSubsetStrategy(value: String): this.type = set(featureSubsetStrategy, value)
/** @group getParam */
......@@ -420,7 +464,11 @@ private[ml] trait GBTParams extends TreeEnsembleParams with HasMaxIter {
// final val validationTol: DoubleParam = new DoubleParam(this, "validationTol", "")
// validationTol -> 1e-5
/** @group setParam */
/**
* @deprecated This method is deprecated and will be removed in 2.2.0.
* @group setParam
*/
@deprecated("This method is deprecated and will be removed in 2.2.0.", "2.1.0")
def setMaxIter(value: Int): this.type = set(maxIter, value)
/**
......@@ -436,7 +484,11 @@ private[ml] trait GBTParams extends TreeEnsembleParams with HasMaxIter {
/** @group getParam */
final def getStepSize: Double = $(stepSize)
/** @group setParam */
/**
* @deprecated This method is deprecated and will be removed in 2.2.0.
* @group setParam
*/
@deprecated("This method is deprecated and will be removed in 2.2.0.", "2.1.0")
def setStepSize(value: Double): this.type = set(stepSize, value)
setDefault(maxIter -> 20, stepSize -> 0.1)
......
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment