Commit 07c16cb5 authored by Joseph K. Bradley, committed by Xiangrui Meng

[SPARK-8053] [MLLIB] renamed scalingVector to scalingVec

I searched the Spark codebase for all occurrences of "scalingVector"

CC: mengxr

Author: Joseph K. Bradley <joseph@databricks.com>

Closes #6596 from jkbradley/scalingVec-rename and squashes the following commits:

d3812f8 [Joseph K. Bradley] renamed scalingVector to scalingVec
parent cafd5056
@@ -41,7 +41,7 @@ class ElementwiseProduct(override val uid: String)
    * the vector to multiply with input vectors
    * @group param
    */
-  val scalingVec: Param[Vector] = new Param(this, "scalingVector", "vector for hadamard product")
+  val scalingVec: Param[Vector] = new Param(this, "scalingVec", "vector for hadamard product")
 
   /** @group setParam */
   def setScalingVec(value: Vector): this.type = set(scalingVec, value)
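For reference, a minimal sketch (not part of this commit) of how the renamed Param in the spark.ml transformer above would be configured. The explicit uid constructor and setScalingVec come from the hunk above; the setInputCol/setOutputCol calls and the column names are assumed from the surrounding Pipeline API and are illustrative only:

  import org.apache.spark.ml.feature.ElementwiseProduct
  import org.apache.spark.mllib.linalg.Vectors

  // The Param is now registered under the name "scalingVec",
  // matching the existing setScalingVec setter shown above.
  val transformer = new ElementwiseProduct("elemProdExample")
    .setScalingVec(Vectors.dense(0.0, 1.0, 2.0))
    .setInputCol("features")        // assumed input column name
    .setOutputCol("scaledFeatures") // assumed output column name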
@@ -25,10 +25,10 @@ import org.apache.spark.mllib.linalg._
  * Outputs the Hadamard product (i.e., the element-wise product) of each input vector with a
  * provided "weight" vector. In other words, it scales each column of the dataset by a scalar
  * multiplier.
- * @param scalingVector The values used to scale the reference vector's individual components.
+ * @param scalingVec The values used to scale the reference vector's individual components.
  */
 @Experimental
-class ElementwiseProduct(val scalingVector: Vector) extends VectorTransformer {
+class ElementwiseProduct(val scalingVec: Vector) extends VectorTransformer {
 
   /**
    * Does the hadamard product transformation.
@@ -37,15 +37,15 @@ class ElementwiseProduct(val scalingVector: Vector) extends VectorTransformer {
    * @return transformed vector.
    */
   override def transform(vector: Vector): Vector = {
-    require(vector.size == scalingVector.size,
-      s"vector sizes do not match: Expected ${scalingVector.size} but found ${vector.size}")
+    require(vector.size == scalingVec.size,
+      s"vector sizes do not match: Expected ${scalingVec.size} but found ${vector.size}")
     vector match {
       case dv: DenseVector =>
         val values: Array[Double] = dv.values.clone()
-        val dim = scalingVector.size
+        val dim = scalingVec.size
         var i = 0
         while (i < dim) {
-          values(i) *= scalingVector(i)
+          values(i) *= scalingVec(i)
           i += 1
         }
         Vectors.dense(values)
@@ -54,7 +54,7 @@ class ElementwiseProduct(val scalingVector: Vector) extends VectorTransformer {
         val dim = values.length
         var i = 0
         while (i < dim) {
-          values(i) *= scalingVector(indices(i))
+          values(i) *= scalingVec(indices(i))
           i += 1
         }
         Vectors.sparse(size, indices, values)
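For reference, a minimal usage sketch (not part of this commit) of the renamed spark.mllib transformer, based only on the constructor and transform method shown above; the vector values are purely illustrative:

  import org.apache.spark.mllib.feature.ElementwiseProduct
  import org.apache.spark.mllib.linalg.Vectors

  // The constructor parameter is now scalingVec (previously scalingVector).
  val transformer = new ElementwiseProduct(Vectors.dense(0.0, 1.0, 2.0))

  // Hadamard (element-wise) product: [1.0 * 0.0, 2.0 * 1.0, 3.0 * 2.0]
  val scaled = transformer.transform(Vectors.dense(1.0, 2.0, 3.0))
  // scaled: [0.0, 2.0, 6.0]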