diff --git a/external/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/CachedKafkaConsumer.scala b/external/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/CachedKafkaConsumer.scala
index 15b28256e825efcaf3d1196cc46adaf2f4c887f8..6d76904fb0e59c47f43b86677d629ab46c21de74 100644
--- a/external/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/CachedKafkaConsumer.scala
+++ b/external/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/CachedKafkaConsumer.scala
@@ -273,19 +273,7 @@ private[kafka010] case class CachedKafkaConsumer private(
       message: String,
       cause: Throwable = null): Unit = {
     val finalMessage = s"$message ${additionalMessage(failOnDataLoss)}"
-    if (failOnDataLoss) {
-      if (cause != null) {
-        throw new IllegalStateException(finalMessage)
-      } else {
-        throw new IllegalStateException(finalMessage, cause)
-      }
-    } else {
-      if (cause != null) {
-        logWarning(finalMessage)
-      } else {
-        logWarning(finalMessage, cause)
-      }
-    }
+    reportDataLoss0(failOnDataLoss, finalMessage, cause)
   }
 
   private def close(): Unit = consumer.close()
@@ -398,4 +386,23 @@ private[kafka010] object CachedKafkaConsumer extends Logging {
       consumer
     }
   }
+
+  private def reportDataLoss0(
+      failOnDataLoss: Boolean,
+      finalMessage: String,
+      cause: Throwable = null): Unit = {
+    if (failOnDataLoss) {
+      if (cause != null) {
+        throw new IllegalStateException(finalMessage, cause)
+      } else {
+        throw new IllegalStateException(finalMessage)
+      }
+    } else {
+      if (cause != null) {
+        logWarning(finalMessage, cause)
+      } else {
+        logWarning(finalMessage)
+      }
+    }
+  }
 }
diff --git a/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/CachedKafkaConsumerSuite.scala b/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/CachedKafkaConsumerSuite.scala
new file mode 100644
index 0000000000000000000000000000000000000000..7aa7dd096c07bed2f2da20cce689bd869660e991
--- /dev/null
+++ b/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/CachedKafkaConsumerSuite.scala
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.kafka010
+
+import org.scalatest.PrivateMethodTester
+
+import org.apache.spark.sql.test.SharedSQLContext
+
+class CachedKafkaConsumerSuite extends SharedSQLContext with PrivateMethodTester {
+
+  test("SPARK-19886: Report error cause correctly in reportDataLoss") {
+    val cause = new Exception("D'oh!")
+    val reportDataLoss = PrivateMethod[Unit]('reportDataLoss0)
+    val e = intercept[IllegalStateException] {
+      CachedKafkaConsumer.invokePrivate(reportDataLoss(true, "message", cause))
+    }
+    assert(e.getCause === cause)
+  }
+}