Skip to content
Snippets Groups Projects
Commit a8f51b82 authored by Michael Armbrust's avatar Michael Armbrust
Browse files

[SPARK-6458][SQL] Better error messages for invalid data sources

Avoid unclear match errors and use `AnalysisException`.

Author: Michael Armbrust <michael@databricks.com>

Closes #5158 from marmbrus/dataSourceError and squashes the following commits:

af9f82a [Michael Armbrust] Yin's comment
90c6ba4 [Michael Armbrust] Better error messages for invalid data sources
parent cbeaf9eb
No related branches found
No related tags found
No related merge requests found
...@@ -21,7 +21,7 @@ import scala.language.existentials ...@@ -21,7 +21,7 @@ import scala.language.existentials
import scala.language.implicitConversions import scala.language.implicitConversions
import org.apache.spark.Logging import org.apache.spark.Logging
import org.apache.spark.sql.{SaveMode, DataFrame, SQLContext} import org.apache.spark.sql.{AnalysisException, SaveMode, DataFrame, SQLContext}
import org.apache.spark.sql.catalyst.plans.logical._ import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.AbstractSparkSQLParser import org.apache.spark.sql.catalyst.AbstractSparkSQLParser
import org.apache.spark.sql.catalyst.analysis.UnresolvedRelation import org.apache.spark.sql.catalyst.analysis.UnresolvedRelation
...@@ -204,19 +204,25 @@ private[sql] object ResolvedDataSource { ...@@ -204,19 +204,25 @@ private[sql] object ResolvedDataSource {
provider: String, provider: String,
options: Map[String, String]): ResolvedDataSource = { options: Map[String, String]): ResolvedDataSource = {
val clazz: Class[_] = lookupDataSource(provider) val clazz: Class[_] = lookupDataSource(provider)
def className = clazz.getCanonicalName
val relation = userSpecifiedSchema match { val relation = userSpecifiedSchema match {
case Some(schema: StructType) => clazz.newInstance() match { case Some(schema: StructType) => clazz.newInstance() match {
case dataSource: SchemaRelationProvider => case dataSource: SchemaRelationProvider =>
dataSource.createRelation(sqlContext, new CaseInsensitiveMap(options), schema) dataSource.createRelation(sqlContext, new CaseInsensitiveMap(options), schema)
case dataSource: org.apache.spark.sql.sources.RelationProvider => case dataSource: org.apache.spark.sql.sources.RelationProvider =>
sys.error(s"${clazz.getCanonicalName} does not allow user-specified schemas.") throw new AnalysisException(s"$className does not allow user-specified schemas.")
case _ =>
throw new AnalysisException(s"$className is not a RelationProvider.")
} }
case None => clazz.newInstance() match { case None => clazz.newInstance() match {
case dataSource: RelationProvider => case dataSource: RelationProvider =>
dataSource.createRelation(sqlContext, new CaseInsensitiveMap(options)) dataSource.createRelation(sqlContext, new CaseInsensitiveMap(options))
case dataSource: org.apache.spark.sql.sources.SchemaRelationProvider => case dataSource: org.apache.spark.sql.sources.SchemaRelationProvider =>
sys.error(s"A schema needs to be specified when using ${clazz.getCanonicalName}.") throw new AnalysisException(
s"A schema needs to be specified when using $className.")
case _ =>
throw new AnalysisException(s"$className is not a RelationProvider.")
} }
} }
new ResolvedDataSource(clazz, relation) new ResolvedDataSource(clazz, relation)
......
0% — Loading, or an error occurred.
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment