Commit f9b397f5 authored by Yin Huai

[SPARK-8567] [SQL] Add logs to record the progress of HiveSparkSubmitSuite.

Author: Yin Huai <yhuai@databricks.com>

Closes #7009 from yhuai/SPARK-8567 and squashes the following commits:

62fb1f9 [Yin Huai] Add sc.stop().
b22cf7d [Yin Huai] Add logs.
parent e988adb5
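
Both test objects touched below mix in Spark's Logging trait, so each new logInfo call emits a timestamped INFO line in the driver log, making it possible to see how far a hanging run got. A minimal sketch of the same pattern, runnable on its own (the object name, app name, and job here are illustrative, not part of this patch):

import org.apache.spark.{Logging, SparkConf, SparkContext}

// Illustrative stand-in for a test app that logs its progress at each step
// and stops the SparkContext so the JVM can exit cleanly.
object ProgressLoggingExample extends Logging {
  def main(args: Array[String]): Unit = {
    // local[2] keeps the sketch self-contained; a spark-submit-driven suite
    // would typically pass the master on the command line instead.
    val conf = new SparkConf().setAppName("ProgressLoggingExample").setMaster("local[2]")
    val sc = new SparkContext(conf)
    logInfo("Starting the test job.")
    val sum = sc.parallelize(1 to 100).reduce(_ + _)
    logInfo(s"Finished the test job; sum = $sum.")
    sc.stop()
  }
}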
@@ -115,6 +115,7 @@ object SparkSubmitClassLoaderTest extends Logging {
     val sc = new SparkContext(conf)
     val hiveContext = new TestHiveContext(sc)
     val df = hiveContext.createDataFrame((1 to 100).map(i => (i, i))).toDF("i", "j")
+    logInfo("Testing load classes at the driver side.")
     // First, we load classes at driver side.
     try {
       Class.forName(args(0), true, Thread.currentThread().getContextClassLoader)
@@ -124,6 +125,7 @@ object SparkSubmitClassLoaderTest extends Logging {
         throw new Exception("Could not load user class from jar:\n", t)
     }
     // Second, we load classes at the executor side.
+    logInfo("Testing load classes at the executor side.")
     val result = df.mapPartitions { x =>
       var exception: String = null
       try {
@@ -141,6 +143,7 @@ object SparkSubmitClassLoaderTest extends Logging {
       }
     }
     // Load a Hive UDF from the jar.
+    logInfo("Registering temporary Hive UDF provided in a jar.")
     hiveContext.sql(
       """
         |CREATE TEMPORARY FUNCTION example_max
@@ -150,18 +153,23 @@ object SparkSubmitClassLoaderTest extends Logging {
     val source =
       hiveContext.createDataFrame((1 to 10).map(i => (i, s"str$i"))).toDF("key", "val")
     source.registerTempTable("sourceTable")
     // Load a Hive SerDe from the jar.
+    logInfo("Creating a Hive table with a SerDe provided in a jar.")
     hiveContext.sql(
       """
         |CREATE TABLE t1(key int, val string)
         |ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe'
       """.stripMargin)
     // Actually use the loaded UDF and SerDe.
+    logInfo("Writing data into the table.")
     hiveContext.sql(
       "INSERT INTO TABLE t1 SELECT example_max(key) as key, val FROM sourceTable GROUP BY val")
+    logInfo("Running a simple query on the table.")
     val count = hiveContext.table("t1").orderBy("key", "val").count()
     if (count != 10) {
       throw new Exception(s"table t1 should have 10 rows instead of $count rows")
     }
+    logInfo("Test finishes.")
+    sc.stop()
   }
 }
@@ -199,5 +207,6 @@ object SparkSQLConfTest extends Logging {
     val hiveContext = new TestHiveContext(sc)
     // Run a simple command to make sure all lazy vals in hiveContext get instantiated.
     hiveContext.tables().collect()
+    sc.stop()
   }
 }
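
The sc.stop() calls added at the end of both objects let the JVM launched by spark-submit shut down promptly once the test body completes. A slightly more defensive variant (a sketch of an alternative, not what this patch does; all names are illustrative) would stop the context in a finally block so it is released even when a check throws:

import org.apache.spark.{SparkConf, SparkContext}

object CleanShutdownExample {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(
      new SparkConf().setAppName("CleanShutdownExample").setMaster("local[2]"))
    try {
      // ... test body: build a context, run queries, check row counts ...
    } finally {
      sc.stop() // always release the SparkContext, even when a check above throws
    }
  }
}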