From 237b96bc59ab1b54c31d06a5260cd77e1eb96116 Mon Sep 17 00:00:00 2001
From: Cheng Lian <lian.cs.zju@gmail.com>
Date: Mon, 16 Jun 2014 16:42:17 -0700
Subject: [PATCH] Minor fix: made "EXPLAIN" output play well with JDBC
 output format

Fixed the broken JDBC output. Tested with Shark's `beeline`:

```
beeline> !connect jdbc:hive2://localhost:10000/
scan complete in 2ms
Connecting to jdbc:hive2://localhost:10000/
Enter username for jdbc:hive2://localhost:10000/: lian
Enter password for jdbc:hive2://localhost:10000/:
Connected to: Hive (version 0.12.0)
Driver: Hive (version 0.12.0)
Transaction isolation: TRANSACTION_REPEATABLE_READ
0: jdbc:hive2://localhost:10000/>
0: jdbc:hive2://localhost:10000/> explain select * from src;
+-------------------------------------------------------------------------------+
|                                     plan                                      |
+-------------------------------------------------------------------------------+
| ExplainCommand [plan#2:0]                                                     |
|  HiveTableScan [key#0,value#1], (MetastoreRelation default, src, None), None  |
+-------------------------------------------------------------------------------+
2 rows selected (1.386 seconds)
```

Before this change, the whole plan was returned as a single multi-line row, which broke the table layout:

```
+-------------------------------------------------------------------------------+
|                                     plan                                      |
+-------------------------------------------------------------------------------+
| ExplainCommand [plan#2:0]
 HiveTableScan [key#0,value#1], (MetastoreRelation default, src, None), None  |
+-------------------------------------------------------------------------------+
```
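
The fix itself is small: instead of concatenating the plan lines into one string and emitting a single row, the command now emits one row per line. A minimal, self-contained sketch of that idea (hypothetical helper and names, not the actual `ExplainCommand` code):

```scala
// Hypothetical sketch: turn a multi-line plan string into one value per line,
// so a JDBC client such as beeline renders each plan line as its own row.
object ExplainRowsSketch {
  def planToRows(plan: String): Seq[Array[Any]] =
    plan.split("\n").toSeq.map(line => Array[Any](line))

  def main(args: Array[String]): Unit = {
    val plan =
      "ExplainCommand [plan#2:0]\n" +
      " HiveTableScan [key#0,value#1], (MetastoreRelation default, src, None), None"
    // Each line becomes its own single-column row.
    planToRows(plan).foreach(row => println(row.mkString))
  }
}
```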

Author: Cheng Lian <lian.cs.zju@gmail.com>

Closes #1097 from liancheng/multiLineExplain and squashes the following commits:

eb37967 [Cheng Lian] Made output of "EXPLAIN" play well with JDBC output format
---
 .../main/scala/org/apache/spark/sql/execution/commands.scala  | 4 ++--
 .../org/apache/spark/sql/hive/api/java/JavaHiveQLSuite.scala  | 2 +-
 .../org/apache/spark/sql/hive/execution/HiveQuerySuite.scala  | 2 +-
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala
index 0377290af5..39b3246c87 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala
@@ -83,8 +83,8 @@ case class ExplainCommand(
   override protected[sql] lazy val sideEffectResult: Seq[String] = this.toString.split("\n")
 
   def execute(): RDD[Row] = {
-    val explanation = sideEffectResult.mkString("\n")
-    context.sparkContext.parallelize(Seq(new GenericRow(Array[Any](explanation))), 1)
+    val explanation = sideEffectResult.map(row => new GenericRow(Array[Any](row)))
+    context.sparkContext.parallelize(explanation, 1)
   }
 
   override def otherCopyArgs = context :: Nil
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/api/java/JavaHiveQLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/api/java/JavaHiveQLSuite.scala
index 3b9cd8f52d..10c8069a62 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/api/java/JavaHiveQLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/api/java/JavaHiveQLSuite.scala
@@ -49,7 +49,7 @@ class JavaHiveQLSuite extends FunSuite {
 
   def isExplanation(result: JavaSchemaRDD) = {
     val explanation = result.collect().map(_.getString(0))
-    explanation.size == 1 && explanation.head.startsWith(explainCommandClassName)
+    explanation.size > 1 && explanation.head.startsWith(explainCommandClassName)
   }
 
   ignore("Query Hive native command execution result") {
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
index 6e8d11b8a1..04652587f9 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
@@ -169,7 +169,7 @@ class HiveQuerySuite extends HiveComparisonTest {
 
   def isExplanation(result: SchemaRDD) = {
     val explanation = result.select('plan).collect().map { case Row(plan: String) => plan }
-    explanation.size == 1 && explanation.head.startsWith(explainCommandClassName)
+    explanation.size > 1 && explanation.head.startsWith(explainCommandClassName)
   }
 
   test("SPARK-1704: Explain commands as a SchemaRDD") {
-- 
GitLab