Skip to content
Snippets Groups Projects
Commit fd52a747 authored by Sean Owen's avatar Sean Owen Committed by Wenchen Fan
Browse files

[SPARK-19810][MINOR][FOLLOW-UP] Follow-ups to the removal of Scala 2.10

## What changes were proposed in this pull request?

Follow up to a few comments on https://github.com/apache/spark/pull/17150#issuecomment-315020196 that couldn't be addressed before it was merged.

## How was this patch tested?

Existing tests.

Author: Sean Owen <sowen@cloudera.com>

Closes #18646 from srowen/SPARK-19810.2.
parent 69e5282d
No related branches found
No related tags found
No related merge requests found
......@@ -17,17 +17,13 @@
package org.apache.spark.sql.catalyst
import java.net.URLClassLoader
import java.sql.{Date, Timestamp}
import scala.reflect.runtime.universe.typeOf
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.expressions.{BoundReference, Literal, SpecificInternalRow}
import org.apache.spark.sql.catalyst.expressions.objects.NewInstance
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String
import org.apache.spark.util.Utils
case class PrimitiveData(
intField: Int,
......@@ -339,39 +335,4 @@ class ScalaReflectionSuite extends SparkFunSuite {
assert(linkedHashMapDeserializer.dataType == ObjectType(classOf[LHMap[_, _]]))
}
// Computed once and shared by the generated thread-safety tests below, so every
// test exercises the same ComplexData reflection inputs.
private val dataTypeForComplexData = dataTypeFor[ComplexData]
private val typeOfComplexData = typeOf[ComplexData]
// SPARK-13640: ScalaReflection entry points must be safe to call concurrently while
// the thread's context class loader is being swapped. Each (name, thunk) pair below
// wraps one reflection API call; foreach registers one ScalaTest test per pair.
Seq(
("mirror", () => mirror),
("dataTypeFor", () => dataTypeFor[ComplexData]),
("constructorFor", () => deserializerFor[ComplexData]),
("extractorsFor", {
// serializerFor needs a bound input expression; build it once, outside the thunk.
val inputObject = BoundReference(0, dataTypeForComplexData, nullable = false)
() => serializerFor[ComplexData](inputObject)
}),
("getConstructorParameters(cls)", () => getConstructorParameters(classOf[ComplexData])),
("getConstructorParameterNames", () => getConstructorParameterNames(classOf[ComplexData])),
("getClassFromType", () => getClassFromType(typeOfComplexData)),
("schemaFor", () => schemaFor[ComplexData]),
("localTypeOf", () => localTypeOf[ComplexData]),
("getClassNameFromType", () => getClassNameFromType(typeOfComplexData)),
("getParameterTypes", () => getParameterTypes(() => ())),
// NOTE(review): this thunk calls getClassNameFromType, not getConstructorParameters(tpe),
// despite its label — presumably a copy-paste slip in the original; confirm upstream.
("getConstructorParameters(tpe)", () => getClassNameFromType(typeOfComplexData))).foreach {
case (name, exec) =>
test(s"SPARK-13640: thread safety of ${name}") {
// 100 outer rounds, each with a fresh URLClassLoader (empty URL set, delegating to
// the current Spark/context loader) to vary the context class loader across runs.
(0 until 100).foreach { _ =>
val loader = new URLClassLoader(Array.empty, Utils.getContextOrSparkClassLoader)
// `.par` runs the 10 inner iterations on a parallel collection, so the reflection
// call executes concurrently from multiple threads with the swapped loader.
(0 until 10).par.foreach { _ =>
val cl = Thread.currentThread.getContextClassLoader
try {
Thread.currentThread.setContextClassLoader(loader)
exec()
} finally {
// Always restore the original context class loader, even if exec() throws.
Thread.currentThread.setContextClassLoader(cl)
}
}
}
}
}
}
......@@ -133,7 +133,7 @@ object ExtractPythonUDFs extends Rule[SparkPlan] with PredicateHelper {
// Keep only the UDFs evaluable against this child alone: every attribute the UDF
// references must appear in the child's output set. (The commit drops a prior
// `.toArray` here, leaving the filtered collection in its original Seq form.)
val validUdfs = udfs.filter { udf =>
// Check to make sure that the UDF can be evaluated with only the input of this child.
udf.references.subsetOf(child.outputSet)
}
if (validUdfs.nonEmpty) {
val resultAttrs = udfs.zipWithIndex.map { case (u, i) =>
AttributeReference(s"pythonUDF$i", u.dataType)()
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment