Commit 94c202c7 authored by Kousuke Saruta, committed by Reynold Xin
[SPARK-12665][CORE][GRAPHX] Remove Vector, VectorSuite and GraphKryoRegistrator which are deprecated and no longer used

All of the code in Vector.scala, VectorSuite.scala, and GraphKryoRegistrator.scala is no longer used, so it's time to remove them in Spark 2.0.

Author: Kousuke Saruta <sarutak@oss.nttdata.co.jp>

Closes #10613 from sarutak/SPARK-12665.
parent 5d871ea4
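
For anyone migrating off the removed class: the deprecation notices in the listings below name Vectors.dense from org.apache.spark.mllib.linalg as the replacement. A minimal construction sketch using that standard mllib API (note that the mllib vector type does not publicly expose the arithmetic operators the removed class had):

import org.apache.spark.mllib.linalg.{Vector => MLVector, Vectors}

// Dense vector construction, replacing new org.apache.spark.util.Vector(...).
val v: MLVector = Vectors.dense(1.0, 2.0, 3.0)

// Element access and size carry over directly.
val first: Double = v(0) // 1.0
val n: Int = v.size      // 3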
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util

import scala.language.implicitConversions
import scala.util.Random

import org.apache.spark.util.random.XORShiftRandom

@deprecated("Use Vectors.dense from Spark's mllib.linalg package instead.", "1.0.0")
class Vector(val elements: Array[Double]) extends Serializable {

  def length: Int = elements.length

  def apply(index: Int): Double = elements(index)

  def + (other: Vector): Vector = {
    if (length != other.length) {
      throw new IllegalArgumentException("Vectors of different length")
    }
    Vector(length, i => this(i) + other(i))
  }

  def add(other: Vector): Vector = this + other

  def - (other: Vector): Vector = {
    if (length != other.length) {
      throw new IllegalArgumentException("Vectors of different length")
    }
    Vector(length, i => this(i) - other(i))
  }

  def subtract(other: Vector): Vector = this - other

  def dot(other: Vector): Double = {
    if (length != other.length) {
      throw new IllegalArgumentException("Vectors of different length")
    }
    var ans = 0.0
    var i = 0
    while (i < length) {
      ans += this(i) * other(i)
      i += 1
    }
    ans
  }
  /**
   * Return (this + plus) dot other, without creating any intermediate storage.
   * @param plus vector added to this one element-wise before the dot product
   * @param other vector to take the dot product with
   * @return the resulting dot product
   */
  def plusDot(plus: Vector, other: Vector): Double = {
    if (length != other.length) {
      throw new IllegalArgumentException("Vectors of different length")
    }
    if (length != plus.length) {
      throw new IllegalArgumentException("Vectors of different length")
    }
    var ans = 0.0
    var i = 0
    while (i < length) {
      ans += (this(i) + plus(i)) * other(i)
      i += 1
    }
    ans
  }

  def += (other: Vector): Vector = {
    if (length != other.length) {
      throw new IllegalArgumentException("Vectors of different length")
    }
    var i = 0
    while (i < length) {
      elements(i) += other(i)
      i += 1
    }
    this
  }

  def addInPlace(other: Vector): Vector = this += other

  def * (scale: Double): Vector = Vector(length, i => this(i) * scale)

  def multiply(d: Double): Vector = this * d

  def / (d: Double): Vector = this * (1 / d)

  def divide(d: Double): Vector = this / d

  def unary_- : Vector = this * -1

  def sum: Double = elements.reduceLeft(_ + _)

  def squaredDist(other: Vector): Double = {
    var ans = 0.0
    var i = 0
    while (i < length) {
      ans += (this(i) - other(i)) * (this(i) - other(i))
      i += 1
    }
    ans
  }

  def dist(other: Vector): Double = math.sqrt(squaredDist(other))

  override def toString: String = elements.mkString("(", ", ", ")")
}
@deprecated("Use Vectors.dense from Spark's mllib.linalg package instead.", "1.0.0")
object Vector {
def apply(elements: Array[Double]): Vector = new Vector(elements)
def apply(elements: Double*): Vector = new Vector(elements.toArray)
def apply(length: Int, initializer: Int => Double): Vector = {
val elements: Array[Double] = Array.tabulate(length)(initializer)
new Vector(elements)
}
def zeros(length: Int): Vector = new Vector(new Array[Double](length))
def ones(length: Int): Vector = Vector(length, _ => 1)
/**
* Creates this [[org.apache.spark.util.Vector]] of given length containing random numbers
* between 0.0 and 1.0. Optional scala.util.Random number generator can be provided.
*/
def random(length: Int, random: Random = new XORShiftRandom()): Vector =
Vector(length, _ => random.nextDouble())
class Multiplier(num: Double) {
def * (vec: Vector): Vector = vec * num
}
implicit def doubleToMultiplier(num: Double): Multiplier = new Multiplier(num)
implicit object VectorAccumParam extends org.apache.spark.AccumulatorParam[Vector] {
def addInPlace(t1: Vector, t2: Vector): Vector = t1 + t2
def zero(initialValue: Vector): Vector = Vector.zeros(initialValue.length)
}
}
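
For reference, a minimal sketch of how the removed API was typically used, based only on the definitions in the listing above:

import org.apache.spark.util.Vector
import org.apache.spark.util.Vector._ // brings doubleToMultiplier into scope

val a = Vector(1.0, 2.0, 3.0)
val b = Vector.ones(3)

val sum = a + b        // element-wise addition: (2.0, 3.0, 4.0)
val prod = a.dot(b)    // 1.0 + 2.0 + 3.0 = 6.0
val scaled = 2.0 * a   // scalar-on-the-left multiply via the implicit Multiplier
val dist = a.dist(b)   // Euclidean distance between a and b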
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util

import scala.util.Random

import org.apache.spark.SparkFunSuite

/**
 * Tests org.apache.spark.util.Vector functionality.
 */
@deprecated("suppress compile time deprecation warning", "1.0.0")
class VectorSuite extends SparkFunSuite {

  def verifyVector(vector: Vector, expectedLength: Int): Unit = {
    assert(vector.length == expectedLength)
    assert(vector.elements.min > 0.0)
    assert(vector.elements.max < 1.0)
  }

  test("random with default random number generator") {
    val vector100 = Vector.random(100)
    verifyVector(vector100, 100)
  }

  test("random with given random number generator") {
    val vector100 = Vector.random(100, new Random(100))
    verifyVector(vector100, 100)
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.graphx

import com.esotericsoftware.kryo.Kryo

import org.apache.spark.graphx.impl._
import org.apache.spark.graphx.util.collection.GraphXPrimitiveKeyOpenHashMap
import org.apache.spark.serializer.KryoRegistrator
import org.apache.spark.util.BoundedPriorityQueue
import org.apache.spark.util.collection.BitSet
import org.apache.spark.util.collection.OpenHashSet

/**
 * Registers GraphX classes with Kryo for improved performance.
 */
@deprecated("Register GraphX classes with Kryo using GraphXUtils.registerKryoClasses", "1.2.0")
class GraphKryoRegistrator extends KryoRegistrator {

  def registerClasses(kryo: Kryo) {
    kryo.register(classOf[Edge[Object]])
    kryo.register(classOf[(VertexId, Object)])
    kryo.register(classOf[EdgePartition[Object, Object]])
    kryo.register(classOf[BitSet])
    kryo.register(classOf[VertexIdToIndexMap])
    kryo.register(classOf[VertexAttributeBlock[Object]])
    kryo.register(classOf[PartitionStrategy])
    kryo.register(classOf[BoundedPriorityQueue[Object]])
    kryo.register(classOf[EdgeDirection])
    kryo.register(classOf[GraphXPrimitiveKeyOpenHashMap[VertexId, Int]])
    kryo.register(classOf[OpenHashSet[Int]])
    kryo.register(classOf[OpenHashSet[Long]])
  }
}
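
The deprecation message above names GraphXUtils.registerKryoClasses as the replacement; a minimal sketch of that path, applied to a SparkConf before the SparkContext is created:

import org.apache.spark.SparkConf
import org.apache.spark.graphx.GraphXUtils

val conf = new SparkConf()
  .setAppName("graphx-kryo-example") // hypothetical app name
  .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")

// Registers the GraphX-internal classes with Kryo, as the removed registrator did.
GraphXUtils.registerKryoClasses(conf)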
@@ -112,6 +112,13 @@ object MimaExcludes {
        ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.api.java.JavaSparkContext.defaultMinSplits"),
        ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.api.java.JavaSparkContext.clearJars"),
        ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.api.java.JavaSparkContext.clearFiles")
      ) ++
      // SPARK-12665 Remove deprecated and unused classes
      Seq(
        ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.graphx.GraphKryoRegistrator"),
        ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.util.Vector"),
        ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.util.Vector$Multiplier"),
        ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.util.Vector$")
      )
    case v if v.startsWith("1.6") =>
      Seq(
        ...