diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/json/JSONRelation.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/json/JSONRelation.scala
index b92edf65bfb6bbdbbb3d9f63c9560c4bb9a10830..8a6fa4aeebc09a3de47a6008900b699d053bd1b4 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/json/JSONRelation.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/json/JSONRelation.scala
@@ -68,29 +68,12 @@ private[sql] class JSONRelation(
     val maybeDataSchema: Option[StructType],
     val maybePartitionSpec: Option[PartitionSpec],
     override val userDefinedPartitionColumns: Option[StructType],
-    override val bucketSpec: Option[BucketSpec],
+    override val bucketSpec: Option[BucketSpec] = None,
     override val paths: Array[String] = Array.empty[String],
     parameters: Map[String, String] = Map.empty[String, String])
     (@transient val sqlContext: SQLContext)
   extends HadoopFsRelation(maybePartitionSpec, parameters) {
 
-  def this(
-      inputRDD: Option[RDD[String]],
-      maybeDataSchema: Option[StructType],
-      maybePartitionSpec: Option[PartitionSpec],
-      userDefinedPartitionColumns: Option[StructType],
-      paths: Array[String] = Array.empty[String],
-      parameters: Map[String, String] = Map.empty[String, String])(sqlContext: SQLContext) = {
-    this(
-      inputRDD,
-      maybeDataSchema,
-      maybePartitionSpec,
-      userDefinedPartitionColumns,
-      None,
-      paths,
-      parameters)(sqlContext)
-  }
-
   val options: JSONOptions = JSONOptions.createFromConfigMap(parameters)
 
   /** Constraints to be imposed on schema to be stored. */
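
For context, a minimal, self-contained sketch of why the default argument makes the deleted auxiliary constructor redundant. BucketSpec and Relation here are simplified stand-ins, not Spark's actual types (the real bucketSpec is an override of a member declared on HadoopFsRelation); the point is only the language mechanism: a default parameter value lets callers omit the argument, which is exactly what the removed `def this(...)` overload existed to allow.

// Hypothetical simplified stand-in for org.apache.spark.sql.sources.BucketSpec.
case class BucketSpec(numBuckets: Int, bucketColumnNames: Seq[String])

// Hypothetical stand-in for the relation class: the default argument now does
// the job the deleted auxiliary constructor used to do.
class Relation(
    val paths: Array[String],
    val bucketSpec: Option[BucketSpec] = None)

object Demo extends App {
  // Callers that previously went through the auxiliary constructor can simply
  // omit the argument; Scala substitutes the default None.
  val unbucketed = new Relation(Array("/data/json"))
  val bucketed = new Relation(Array("/data/json"), Some(BucketSpec(8, Seq("id"))))
  println(unbucketed.bucketSpec) // None
  println(bucketed.bucketSpec)   // Some(BucketSpec(8,List(id)))
}

One caveat with this kind of change: default parameter values only help Scala callers; any Java caller (or reflective instantiation) that relied on the shorter constructor signature would still need the overload. Within Spark's own `private[sql]` usage that concern does not apply, so dropping the overload is a net simplification.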