Commit c3a54843 authored by Xiangrui Meng

[SPARK-10240] [SPARK-10242] [MLLIB] update since versions in mllib.random and mllib.stat

The same as #8241 but for `mllib.stat` and `mllib.random`.

cc feynmanliang

Author: Xiangrui Meng <meng@databricks.com>

Closes #8439 from mengxr/SPARK-10242.
parent ab431f8a
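For readers unfamiliar with the mechanism: `@Since` (org.apache.spark.annotation.Since) records the Spark version in which a public API element first appeared, and the commit applies it to classes, primary constructors, constructor parameters, and methods alike. A minimal sketch of the pattern (the class itself is hypothetical, not part of the commit):

import org.apache.spark.annotation.Since

@Since("1.3.0")                          // version the class first appeared in
class ExampleGenerator @Since("1.3.0") ( // the primary constructor is annotated too
    @Since("1.3.0") val mean: Double) {  // ... as is each public constructor parameter

  @Since("1.3.0")                        // ... and each public method
  def nextValue(): Double = mean
}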
@@ -20,7 +20,7 @@ package org.apache.spark.mllib.random
import org.apache.commons.math3.distribution.{ExponentialDistribution,
GammaDistribution, LogNormalDistribution, PoissonDistribution}
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.{Since, DeveloperApi}
import org.apache.spark.util.random.{XORShiftRandom, Pseudorandom}
/**
@@ -28,17 +28,20 @@ import org.apache.spark.util.random.{XORShiftRandom, Pseudorandom}
* Trait for random data generators that generate i.i.d. data.
*/
@DeveloperApi
@Since("1.1.0")
trait RandomDataGenerator[T] extends Pseudorandom with Serializable {
/**
* Returns an i.i.d. sample as a generic type from an underlying distribution.
*/
@Since("1.1.0")
def nextValue(): T
/**
* Returns a copy of the RandomDataGenerator with a new instance of the rng object used in the
* class when applicable for non-locking concurrent usage.
*/
@Since("1.1.0")
def copy(): RandomDataGenerator[T]
}
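To make the trait's contract concrete, here is a hypothetical implementation (a sketch, not part of this commit) that draws i.i.d. samples from U[-1, 1]:

import java.util.Random

class SymmetricUniformGenerator extends RandomDataGenerator[Double] {
  private val random = new Random()

  // One i.i.d. draw from U[-1, 1].
  override def nextValue(): Double = 2.0 * random.nextDouble() - 1.0

  override def setSeed(seed: Long): Unit = random.setSeed(seed)

  // A new instance with its own RNG, satisfying the non-locking concurrent-usage requirement.
  override def copy(): SymmetricUniformGenerator = new SymmetricUniformGenerator()
}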
@@ -47,17 +50,21 @@ trait RandomDataGenerator[T] extends Pseudorandom with Serializable {
* Generates i.i.d. samples from U[0.0, 1.0]
*/
@DeveloperApi
@Since("1.1.0")
class UniformGenerator extends RandomDataGenerator[Double] {
// XORShiftRandom for better performance. Thread safety isn't necessary here.
private val random = new XORShiftRandom()
@Since("1.1.0")
override def nextValue(): Double = {
random.nextDouble()
}
@Since("1.1.0")
override def setSeed(seed: Long): Unit = random.setSeed(seed)
@Since("1.1.0")
override def copy(): UniformGenerator = new UniformGenerator()
}
@@ -66,17 +73,21 @@ class UniformGenerator extends RandomDataGenerator[Double] {
* Generates i.i.d. samples from the standard normal distribution.
*/
@DeveloperApi
@Since("1.1.0")
class StandardNormalGenerator extends RandomDataGenerator[Double] {
// XORShiftRandom for better performance. Thread safety isn't necessary here.
private val random = new XORShiftRandom()
@Since("1.1.0")
override def nextValue(): Double = {
random.nextGaussian()
}
@Since("1.1.0")
override def setSeed(seed: Long): Unit = random.setSeed(seed)
@Since("1.1.0")
override def copy(): StandardNormalGenerator = new StandardNormalGenerator()
}
@@ -87,16 +98,21 @@ class StandardNormalGenerator extends RandomDataGenerator[Double] {
* @param mean mean for the Poisson distribution.
*/
@DeveloperApi
-class PoissonGenerator(val mean: Double) extends RandomDataGenerator[Double] {
+@Since("1.1.0")
+class PoissonGenerator @Since("1.1.0") (
+    @Since("1.1.0") val mean: Double) extends RandomDataGenerator[Double] {
private val rng = new PoissonDistribution(mean)
@Since("1.1.0")
override def nextValue(): Double = rng.sample()
@Since("1.1.0")
override def setSeed(seed: Long) {
rng.reseedRandomGenerator(seed)
}
@Since("1.1.0")
override def copy(): PoissonGenerator = new PoissonGenerator(mean)
}
@@ -107,16 +123,21 @@ class PoissonGenerator(val mean: Double) extends RandomDataGenerator[Double] {
* @param mean mean for the exponential distribution.
*/
@DeveloperApi
-class ExponentialGenerator(val mean: Double) extends RandomDataGenerator[Double] {
+@Since("1.3.0")
+class ExponentialGenerator @Since("1.3.0") (
+    @Since("1.3.0") val mean: Double) extends RandomDataGenerator[Double] {
private val rng = new ExponentialDistribution(mean)
@Since("1.3.0")
override def nextValue(): Double = rng.sample()
@Since("1.3.0")
override def setSeed(seed: Long) {
rng.reseedRandomGenerator(seed)
}
@Since("1.3.0")
override def copy(): ExponentialGenerator = new ExponentialGenerator(mean)
}
@@ -128,16 +149,22 @@ class ExponentialGenerator(val mean: Double) extends RandomDataGenerator[Double]
* @param scale scale for the gamma distribution
*/
@DeveloperApi
-class GammaGenerator(val shape: Double, val scale: Double) extends RandomDataGenerator[Double] {
+@Since("1.3.0")
+class GammaGenerator @Since("1.3.0") (
+    @Since("1.3.0") val shape: Double,
+    @Since("1.3.0") val scale: Double) extends RandomDataGenerator[Double] {
private val rng = new GammaDistribution(shape, scale)
@Since("1.3.0")
override def nextValue(): Double = rng.sample()
@Since("1.3.0")
override def setSeed(seed: Long) {
rng.reseedRandomGenerator(seed)
}
@Since("1.3.0")
override def copy(): GammaGenerator = new GammaGenerator(shape, scale)
}
@@ -150,15 +177,21 @@ class GammaGenerator(val shape: Double, val scale: Double) extends RandomDataGen
* @param std standard deviation for the log normal distribution
*/
@DeveloperApi
-class LogNormalGenerator(val mean: Double, val std: Double) extends RandomDataGenerator[Double] {
+@Since("1.3.0")
+class LogNormalGenerator @Since("1.3.0") (
+    @Since("1.3.0") val mean: Double,
+    @Since("1.3.0") val std: Double) extends RandomDataGenerator[Double] {
private val rng = new LogNormalDistribution(mean, std)
@Since("1.3.0")
override def nextValue(): Double = rng.sample()
@Since("1.3.0")
override def setSeed(seed: Long) {
rng.reseedRandomGenerator(seed)
}
@Since("1.3.0")
override def copy(): LogNormalGenerator = new LogNormalGenerator(mean, std)
}
@@ -20,7 +20,7 @@ package org.apache.spark.mllib.random
import scala.reflect.ClassTag
import org.apache.spark.SparkContext
-import org.apache.spark.annotation.{DeveloperApi, Experimental}
+import org.apache.spark.annotation.{DeveloperApi, Experimental, Since}
import org.apache.spark.api.java.{JavaDoubleRDD, JavaRDD, JavaSparkContext}
import org.apache.spark.mllib.linalg.Vector
import org.apache.spark.mllib.rdd.{RandomRDD, RandomVectorRDD}
@@ -32,6 +32,7 @@ import org.apache.spark.util.Utils
* Generator methods for creating RDDs comprised of `i.i.d.` samples from some distribution.
*/
@Experimental
@Since("1.1.0")
object RandomRDDs {
/**
@@ -46,6 +47,7 @@ object RandomRDDs {
* @param seed Random seed (default: a random long integer).
* @return RDD[Double] comprised of `i.i.d.` samples ~ `U(0.0, 1.0)`.
*/
@Since("1.1.0")
def uniformRDD(
sc: SparkContext,
size: Long,
@@ -58,6 +60,7 @@ object RandomRDDs {
/**
* Java-friendly version of [[RandomRDDs#uniformRDD]].
*/
@Since("1.1.0")
def uniformJavaRDD(
jsc: JavaSparkContext,
size: Long,
@@ -69,6 +72,7 @@ object RandomRDDs {
/**
* [[RandomRDDs#uniformJavaRDD]] with the default seed.
*/
@Since("1.1.0")
def uniformJavaRDD(jsc: JavaSparkContext, size: Long, numPartitions: Int): JavaDoubleRDD = {
JavaDoubleRDD.fromRDD(uniformRDD(jsc.sc, size, numPartitions))
}
@@ -76,6 +80,7 @@ object RandomRDDs {
/**
* [[RandomRDDs#uniformJavaRDD]] with the default number of partitions and the default seed.
*/
@Since("1.1.0")
def uniformJavaRDD(jsc: JavaSparkContext, size: Long): JavaDoubleRDD = {
JavaDoubleRDD.fromRDD(uniformRDD(jsc.sc, size))
}
@@ -92,6 +97,7 @@ object RandomRDDs {
* @param seed Random seed (default: a random long integer).
* @return RDD[Double] comprised of `i.i.d.` samples ~ N(0.0, 1.0).
*/
@Since("1.1.0")
def normalRDD(
sc: SparkContext,
size: Long,
@@ -104,6 +110,7 @@ object RandomRDDs {
/**
* Java-friendly version of [[RandomRDDs#normalRDD]].
*/
@Since("1.1.0")
def normalJavaRDD(
jsc: JavaSparkContext,
size: Long,
@@ -115,6 +122,7 @@ object RandomRDDs {
/**
* [[RandomRDDs#normalJavaRDD]] with the default seed.
*/
@Since("1.1.0")
def normalJavaRDD(jsc: JavaSparkContext, size: Long, numPartitions: Int): JavaDoubleRDD = {
JavaDoubleRDD.fromRDD(normalRDD(jsc.sc, size, numPartitions))
}
@@ -122,6 +130,7 @@ object RandomRDDs {
/**
* [[RandomRDDs#normalJavaRDD]] with the default number of partitions and the default seed.
*/
@Since("1.1.0")
def normalJavaRDD(jsc: JavaSparkContext, size: Long): JavaDoubleRDD = {
JavaDoubleRDD.fromRDD(normalRDD(jsc.sc, size))
}
@@ -137,6 +146,7 @@ object RandomRDDs {
* @param seed Random seed (default: a random long integer).
* @return RDD[Double] comprised of `i.i.d.` samples ~ Pois(mean).
*/
@Since("1.1.0")
def poissonRDD(
sc: SparkContext,
mean: Double,
@@ -150,6 +160,7 @@ object RandomRDDs {
/**
* Java-friendly version of [[RandomRDDs#poissonRDD]].
*/
@Since("1.1.0")
def poissonJavaRDD(
jsc: JavaSparkContext,
mean: Double,
@@ -162,6 +173,7 @@ object RandomRDDs {
/**
* [[RandomRDDs#poissonJavaRDD]] with the default seed.
*/
@Since("1.1.0")
def poissonJavaRDD(
jsc: JavaSparkContext,
mean: Double,
@@ -173,6 +185,7 @@ object RandomRDDs {
/**
* [[RandomRDDs#poissonJavaRDD]] with the default number of partitions and the default seed.
*/
@Since("1.1.0")
def poissonJavaRDD(jsc: JavaSparkContext, mean: Double, size: Long): JavaDoubleRDD = {
JavaDoubleRDD.fromRDD(poissonRDD(jsc.sc, mean, size))
}
@@ -188,6 +201,7 @@ object RandomRDDs {
* @param seed Random seed (default: a random long integer).
* @return RDD[Double] comprised of `i.i.d.` samples ~ Exp(mean).
*/
@Since("1.3.0")
def exponentialRDD(
sc: SparkContext,
mean: Double,
@@ -201,6 +215,7 @@ object RandomRDDs {
/**
* Java-friendly version of [[RandomRDDs#exponentialRDD]].
*/
@Since("1.3.0")
def exponentialJavaRDD(
jsc: JavaSparkContext,
mean: Double,
@@ -213,6 +228,7 @@ object RandomRDDs {
/**
* [[RandomRDDs#exponentialJavaRDD]] with the default seed.
*/
@Since("1.3.0")
def exponentialJavaRDD(
jsc: JavaSparkContext,
mean: Double,
@@ -224,6 +240,7 @@ object RandomRDDs {
/**
* [[RandomRDDs#exponentialJavaRDD]] with the default number of partitions and the default seed.
*/
@Since("1.3.0")
def exponentialJavaRDD(jsc: JavaSparkContext, mean: Double, size: Long): JavaDoubleRDD = {
JavaDoubleRDD.fromRDD(exponentialRDD(jsc.sc, mean, size))
}
@@ -240,6 +257,7 @@ object RandomRDDs {
* @param seed Random seed (default: a random long integer).
* @return RDD[Double] comprised of `i.i.d.` samples ~ Gamma(shape, scale).
*/
@Since("1.3.0")
def gammaRDD(
sc: SparkContext,
shape: Double,
@@ -254,6 +272,7 @@ object RandomRDDs {
/**
* Java-friendly version of [[RandomRDDs#gammaRDD]].
*/
@Since("1.3.0")
def gammaJavaRDD(
jsc: JavaSparkContext,
shape: Double,
@@ -267,6 +286,7 @@ object RandomRDDs {
/**
* [[RandomRDDs#gammaJavaRDD]] with the default seed.
*/
@Since("1.3.0")
def gammaJavaRDD(
jsc: JavaSparkContext,
shape: Double,
@@ -279,11 +299,12 @@ object RandomRDDs {
/**
* [[RandomRDDs#gammaJavaRDD]] with the default number of partitions and the default seed.
*/
@Since("1.3.0")
def gammaJavaRDD(
-      jsc: JavaSparkContext,
-      shape: Double,
-      scale: Double,
-      size: Long): JavaDoubleRDD = {
+    jsc: JavaSparkContext,
+    shape: Double,
+    scale: Double,
+    size: Long): JavaDoubleRDD = {
JavaDoubleRDD.fromRDD(gammaRDD(jsc.sc, shape, scale, size))
}
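For the gamma variant, E[X] = shape * scale gives a quick sanity check on the output:

val g = RandomRDDs.gammaRDD(sc, 2.0, 3.0, 100000L) // i.i.d. samples ~ Gamma(shape = 2, scale = 3)
println(g.mean())                                  // should be close to 2.0 * 3.0 = 6.0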
@@ -299,6 +320,7 @@ object RandomRDDs {
* @param seed Random seed (default: a random long integer).
* @return RDD[Double] comprised of `i.i.d.` samples ~ LogNormal(mean, std).
*/
@Since("1.3.0")
def logNormalRDD(
sc: SparkContext,
mean: Double,
@@ -313,6 +335,7 @@ object RandomRDDs {
/**
* Java-friendly version of [[RandomRDDs#logNormalRDD]].
*/
@Since("1.3.0")
def logNormalJavaRDD(
jsc: JavaSparkContext,
mean: Double,
@@ -326,6 +349,7 @@ object RandomRDDs {
/**
* [[RandomRDDs#logNormalJavaRDD]] with the default seed.
*/
@Since("1.3.0")
def logNormalJavaRDD(
jsc: JavaSparkContext,
mean: Double,
@@ -338,11 +362,12 @@ object RandomRDDs {
/**
* [[RandomRDDs#logNormalJavaRDD]] with the default number of partitions and the default seed.
*/
@Since("1.3.0")
def logNormalJavaRDD(
-      jsc: JavaSparkContext,
-      mean: Double,
-      std: Double,
-      size: Long): JavaDoubleRDD = {
+    jsc: JavaSparkContext,
+    mean: Double,
+    std: Double,
+    size: Long): JavaDoubleRDD = {
JavaDoubleRDD.fromRDD(logNormalRDD(jsc.sc, mean, std, size))
}
@@ -359,6 +384,7 @@ object RandomRDDs {
* @return RDD[Double] comprised of `i.i.d.` samples produced by generator.
*/
@DeveloperApi
@Since("1.1.0")
def randomRDD[T: ClassTag](
sc: SparkContext,
generator: RandomDataGenerator[T],
@@ -381,6 +407,7 @@ object RandomRDDs {
* @param seed Seed for the RNG that generates the seed for the generator in each partition.
* @return RDD[Vector] with vectors containing i.i.d. samples ~ `U(0.0, 1.0)`.
*/
@Since("1.1.0")
def uniformVectorRDD(
sc: SparkContext,
numRows: Long,
@@ -394,6 +421,7 @@ object RandomRDDs {
/**
* Java-friendly version of [[RandomRDDs#uniformVectorRDD]].
*/
@Since("1.1.0")
def uniformJavaVectorRDD(
jsc: JavaSparkContext,
numRows: Long,
@@ -406,6 +434,7 @@ object RandomRDDs {
/**
* [[RandomRDDs#uniformJavaVectorRDD]] with the default seed.
*/
@Since("1.1.0")
def uniformJavaVectorRDD(
jsc: JavaSparkContext,
numRows: Long,
@@ -417,6 +446,7 @@ object RandomRDDs {
/**
* [[RandomRDDs#uniformJavaVectorRDD]] with the default number of partitions and the default seed.
*/
@Since("1.1.0")
def uniformJavaVectorRDD(
jsc: JavaSparkContext,
numRows: Long,
@@ -435,6 +465,7 @@ object RandomRDDs {
* @param seed Random seed (default: a random long integer).
* @return RDD[Vector] with vectors containing `i.i.d.` samples ~ `N(0.0, 1.0)`.
*/
@Since("1.1.0")
def normalVectorRDD(
sc: SparkContext,
numRows: Long,
@@ -448,6 +479,7 @@ object RandomRDDs {
/**
* Java-friendly version of [[RandomRDDs#normalVectorRDD]].
*/
@Since("1.1.0")
def normalJavaVectorRDD(
jsc: JavaSparkContext,
numRows: Long,
@@ -460,6 +492,7 @@ object RandomRDDs {
/**
* [[RandomRDDs#normalJavaVectorRDD]] with the default seed.
*/
@Since("1.1.0")
def normalJavaVectorRDD(
jsc: JavaSparkContext,
numRows: Long,
@@ -471,6 +504,7 @@ object RandomRDDs {
/**
* [[RandomRDDs#normalJavaVectorRDD]] with the default number of partitions and the default seed.
*/
@Since("1.1.0")
def normalJavaVectorRDD(
jsc: JavaSparkContext,
numRows: Long,
@@ -491,6 +525,7 @@ object RandomRDDs {
* @param seed Random seed (default: a random long integer).
* @return RDD[Vector] with vectors containing `i.i.d.` samples.
*/
@Since("1.3.0")
def logNormalVectorRDD(
sc: SparkContext,
mean: Double,
@@ -507,6 +542,7 @@ object RandomRDDs {
/**
* Java-friendly version of [[RandomRDDs#logNormalVectorRDD]].
*/
@Since("1.3.0")
def logNormalJavaVectorRDD(
jsc: JavaSparkContext,
mean: Double,
@@ -521,6 +557,7 @@ object RandomRDDs {
/**
* [[RandomRDDs#logNormalJavaVectorRDD]] with the default seed.
*/
@Since("1.3.0")
def logNormalJavaVectorRDD(
jsc: JavaSparkContext,
mean: Double,
@@ -535,6 +572,7 @@ object RandomRDDs {
* [[RandomRDDs#logNormalJavaVectorRDD]] with the default number of partitions and
* the default seed.
*/
@Since("1.3.0")
def logNormalJavaVectorRDD(
jsc: JavaSparkContext,
mean: Double,
@@ -556,6 +594,7 @@ object RandomRDDs {
* @param seed Random seed (default: a random long integer).
* @return RDD[Vector] with vectors containing `i.i.d.` samples ~ Pois(mean).
*/
@Since("1.1.0")
def poissonVectorRDD(
sc: SparkContext,
mean: Double,
@@ -570,6 +609,7 @@ object RandomRDDs {
/**
* Java-friendly version of [[RandomRDDs#poissonVectorRDD]].
*/
@Since("1.1.0")
def poissonJavaVectorRDD(
jsc: JavaSparkContext,
mean: Double,
@@ -583,6 +623,7 @@ object RandomRDDs {
/**
* [[RandomRDDs#poissonJavaVectorRDD]] with the default seed.
*/
@Since("1.1.0")
def poissonJavaVectorRDD(
jsc: JavaSparkContext,
mean: Double,
@@ -595,6 +636,7 @@ object RandomRDDs {
/**
* [[RandomRDDs#poissonJavaVectorRDD]] with the default number of partitions and the default seed.
*/
@Since("1.1.0")
def poissonJavaVectorRDD(
jsc: JavaSparkContext,
mean: Double,
@@ -615,6 +657,7 @@ object RandomRDDs {
* @param seed Random seed (default: a random long integer).
* @return RDD[Vector] with vectors containing `i.i.d.` samples ~ Exp(mean).
*/
@Since("1.3.0")
def exponentialVectorRDD(
sc: SparkContext,
mean: Double,
@@ -630,6 +673,7 @@ object RandomRDDs {
/**
* Java-friendly version of [[RandomRDDs#exponentialVectorRDD]].
*/
@Since("1.3.0")
def exponentialJavaVectorRDD(
jsc: JavaSparkContext,
mean: Double,
@@ -643,6 +687,7 @@ object RandomRDDs {
/**
* [[RandomRDDs#exponentialJavaVectorRDD]] with the default seed.
*/
@Since("1.3.0")
def exponentialJavaVectorRDD(
jsc: JavaSparkContext,
mean: Double,
@@ -656,6 +701,7 @@ object RandomRDDs {
* [[RandomRDDs#exponentialJavaVectorRDD]] with the default number of partitions
* and the default seed.
*/
@Since("1.3.0")
def exponentialJavaVectorRDD(
jsc: JavaSparkContext,
mean: Double,
@@ -678,6 +724,7 @@ object RandomRDDs {
* @param seed Random seed (default: a random long integer).
* @return RDD[Vector] with vectors containing `i.i.d.` samples ~ Gamma(shape, scale).
*/
@Since("1.3.0")
def gammaVectorRDD(
sc: SparkContext,
shape: Double,
@@ -693,6 +740,7 @@ object RandomRDDs {
/**
* Java-friendly version of [[RandomRDDs#gammaVectorRDD]].
*/
@Since("1.3.0")
def gammaJavaVectorRDD(
jsc: JavaSparkContext,
shape: Double,
@@ -707,6 +755,7 @@ object RandomRDDs {
/**
* [[RandomRDDs#gammaJavaVectorRDD]] with the default seed.
*/
@Since("1.3.0")
def gammaJavaVectorRDD(
jsc: JavaSparkContext,
shape: Double,
@@ -720,6 +769,7 @@ object RandomRDDs {
/**
* [[RandomRDDs#gammaJavaVectorRDD]] with the default number of partitions and the default seed.
*/
@Since("1.3.0")
def gammaJavaVectorRDD(
jsc: JavaSparkContext,
shape: Double,
@@ -744,6 +794,7 @@ object RandomRDDs {
* @return RDD[Vector] with vectors containing `i.i.d.` samples produced by generator.
*/
@DeveloperApi
@Since("1.1.0")
def randomVectorRDD(sc: SparkContext,
generator: RandomDataGenerator[Double],
numRows: Long,
......
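The vector methods annotated above follow the same naming scheme but return RDD[Vector]; a sketch producing a random design matrix (assuming an active SparkContext `sc`):

import org.apache.spark.mllib.linalg.Vector
import org.apache.spark.rdd.RDD

// A 10000 x 5 matrix, one Vector per row, entries i.i.d. ~ N(0, 1).
val mat: RDD[Vector] = RandomRDDs.normalVectorRDD(sc, numRows = 10000L, numCols = 5)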
@@ -35,9 +35,9 @@ import org.apache.spark.mllib.util.MLUtils
*/
@Since("1.3.0")
@DeveloperApi
-class MultivariateGaussian (
-    val mu: Vector,
-    val sigma: Matrix) extends Serializable {
+class MultivariateGaussian @Since("1.3.0") (
+    @Since("1.3.0") val mu: Vector,
+    @Since("1.3.0") val sigma: Matrix) extends Serializable {
require(sigma.numCols == sigma.numRows, "Covariance matrix must be square")
require(mu.size == sigma.numCols, "Mean vector length must match covariance matrix size")
......
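A usage sketch for the class above: the standard bivariate Gaussian has density 1 / (2 * pi) ≈ 0.159 at its mean, which makes a handy spot check:

import org.apache.spark.mllib.linalg.{Matrices, Vectors}
import org.apache.spark.mllib.stat.distribution.MultivariateGaussian

val gaussian = new MultivariateGaussian(
  Vectors.dense(0.0, 0.0),                         // mu
  Matrices.dense(2, 2, Array(1.0, 0.0, 0.0, 1.0))) // sigma: 2x2 identity, column-major
println(gaussian.pdf(Vectors.dense(0.0, 0.0)))     // ~0.15915, i.e. 1 / (2 * pi)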
@@ -17,7 +17,7 @@
package org.apache.spark.mllib.stat.test
-import org.apache.spark.annotation.Experimental
+import org.apache.spark.annotation.{Experimental, Since}
/**
* :: Experimental ::
@@ -25,28 +25,33 @@ import org.apache.spark.annotation.Experimental
* @tparam DF Return type of `degreesOfFreedom`.
*/
@Experimental
@Since("1.1.0")
trait TestResult[DF] {
/**
* The probability of obtaining a test statistic result at least as extreme as the one that was
* actually observed, assuming that the null hypothesis is true.
*/
@Since("1.1.0")
def pValue: Double
/**
* Returns the degree(s) of freedom of the hypothesis test.
* Return type should be a Number (e.g. Int, Double) or a tuple of Numbers for toString compatibility.
*/
@Since("1.1.0")
def degreesOfFreedom: DF
/**
* Test statistic.
*/
@Since("1.1.0")
def statistic: Double
/**
* Null hypothesis of the test.
*/
@Since("1.1.0")
def nullHypothesis: String
/**
@@ -78,11 +83,12 @@ trait TestResult[DF] {
* Object containing the test results for the chi-squared hypothesis test.
*/
@Experimental
@Since("1.1.0")
class ChiSqTestResult private[stat] (override val pValue: Double,
-    override val degreesOfFreedom: Int,
-    override val statistic: Double,
-    val method: String,
-    override val nullHypothesis: String) extends TestResult[Int] {
+    @Since("1.1.0") override val degreesOfFreedom: Int,
+    @Since("1.1.0") override val statistic: Double,
+    @Since("1.1.0") val method: String,
+    @Since("1.1.0") override val nullHypothesis: String) extends TestResult[Int] {
override def toString: String = {
"Chi squared test summary:\n" +
@@ -96,11 +102,13 @@ class ChiSqTestResult private[stat] (override val pValue: Double,
* Object containing the test results for the Kolmogorov-Smirnov test.
*/
@Experimental
@Since("1.5.0")
class KolmogorovSmirnovTestResult private[stat] (
-    override val pValue: Double,
-    override val statistic: Double,
-    override val nullHypothesis: String) extends TestResult[Int] {
+    @Since("1.5.0") override val pValue: Double,
+    @Since("1.5.0") override val statistic: Double,
+    @Since("1.5.0") override val nullHypothesis: String) extends TestResult[Int] {
@Since("1.5.0")
override val degreesOfFreedom = 0
override def toString: String = {
......
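A usage sketch connecting these result classes to their entry point in `Statistics` (assuming an active SparkContext `sc`); the fields read below are exactly the `@Since`-annotated members above:

import org.apache.spark.mllib.random.RandomRDDs
import org.apache.spark.mllib.stat.Statistics

val sample = RandomRDDs.normalRDD(sc, 1000L, 10, seed = 11L)
// One-sample, two-sided Kolmogorov-Smirnov test against a standard normal N(0, 1).
val result = Statistics.kolmogorovSmirnovTest(sample, "norm", 0.0, 1.0)
println(result)        // summary: test statistic, p-value, and null hypothesis
println(result.pValue) // a small p-value argues for rejecting the null hypothesis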