Commit 1c80d66e authored by navis.ryu, committed by Michael Armbrust

[SPARK-11546] Thrift server makes too many logs about result schema

SparkExecuteStatementOperation logs the result schema on every getNextRowSet() call, which by default happens every 1000 rows, overwhelming the whole log file.

Author: navis.ryu <navis@apache.org>

Closes #9514 from navis/SPARK-11546.
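
For context: the fix relies on Scala's lazy val semantics, where the initializer body (including the logInfo call) runs only on the first access and its result is cached for later reads. A minimal, self-contained sketch of that idea, using illustrative names rather than the actual Spark classes:

object LazySchemaSketch extends App {
  // Evaluated once, on first access; the "log" line below is printed a single time.
  private lazy val resultSchemaDescription: String = {
    println("Result Schema: [col1: string]")
    "col1: string"
  }

  // Simulate repeated result fetches: the schema is read five times,
  // but the initializer (and its log output) runs only once.
  (1 to 5).foreach(_ => resultSchemaDescription)
}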
parent 6d0ead32
@@ -53,6 +53,18 @@ private[hive] class SparkExecuteStatementOperation(
   private var dataTypes: Array[DataType] = _
   private var statementId: String = _
 
+  private lazy val resultSchema: TableSchema = {
+    if (result == null || result.queryExecution.analyzed.output.size == 0) {
+      new TableSchema(Arrays.asList(new FieldSchema("Result", "string", "")))
+    } else {
+      logInfo(s"Result Schema: ${result.queryExecution.analyzed.output}")
+      val schema = result.queryExecution.analyzed.output.map { attr =>
+        new FieldSchema(attr.name, HiveMetastoreTypes.toMetastoreType(attr.dataType), "")
+      }
+      new TableSchema(schema.asJava)
+    }
+  }
+
   def close(): Unit = {
     // RDDs will be cleaned automatically upon garbage collection.
     hiveContext.sparkContext.clearJobGroup()
@@ -120,17 +132,7 @@ private[hive] class SparkExecuteStatementOperation(
     }
   }
 
-  def getResultSetSchema: TableSchema = {
-    if (result == null || result.queryExecution.analyzed.output.size == 0) {
-      new TableSchema(Arrays.asList(new FieldSchema("Result", "string", "")))
-    } else {
-      logInfo(s"Result Schema: ${result.queryExecution.analyzed.output}")
-      val schema = result.queryExecution.analyzed.output.map { attr =>
-        new FieldSchema(attr.name, HiveMetastoreTypes.toMetastoreType(attr.dataType), "")
-      }
-      new TableSchema(schema.asJava)
-    }
-  }
+  def getResultSetSchema: TableSchema = resultSchema
 
   override def run(): Unit = {
     setState(OperationState.PENDING)