Commit b75b3070 authored by Liang-Chi Hsieh's avatar Liang-Chi Hsieh Committed by Michael Armbrust

[SPARK-6730][SQL] Allow using keyword as identifier in OPTIONS

JIRA: https://issues.apache.org/jira/browse/SPARK-6730

It is quite likely that a keyword will be used as an identifier in `OPTIONS`; this PR makes that work.

Alternatively, we could require that `OPTIONS` not contain keywords, forcing data sources to use alternative identifiers (e.g. table -> cassandraTable) where needed.

If that approach is preferred, please let me know and I will close this PR. Thanks.
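
For context, a sketch of the motivating case (the source name and option keys here are illustrative, not taken from this patch): an external data source may naturally want an option named `table`, which is also a SQL keyword, and the DDL parser previously rejected such a key.

// Hypothetical DDL against an external source whose natural option name
// collides with a SQL keyword; before this patch `table` did not parse
// as an OPTIONS key. (`sqlContext` is assumed to be in scope.)
sqlContext.sql(
  """
    |CREATE TEMPORARY TABLE users
    |USING org.apache.spark.sql.cassandra
    |OPTIONS (
    |  keyspace 'test',
    |  table 'users'
    |)
  """.stripMargin)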

Author: Liang-Chi Hsieh <viirya@gmail.com>

Closes #5520 from viirya/relax_options and squashes the following commits:

339fd68 [Liang-Chi Hsieh] Use regex parser.
92be11c [Liang-Chi Hsieh] Allow using keyword as identifier in OPTIONS.
parent f11288d5
@@ -18,6 +18,7 @@
 package org.apache.spark.sql.sources
 
 import scala.language.existentials
+import scala.util.matching.Regex
 import scala.language.implicitConversions
 
 import org.apache.spark.Logging
@@ -155,7 +156,19 @@ private[sql] class DDLParser(
 
   protected lazy val className: Parser[String] = repsep(ident, ".") ^^ { case s => s.mkString(".")}
 
-  protected lazy val pair: Parser[(String, String)] = ident ~ stringLit ^^ { case k ~ v => (k,v) }
+  override implicit def regexToParser(regex: Regex): Parser[String] = acceptMatch(
+    s"identifier matching regex ${regex}", {
+      case lexical.Identifier(str) if regex.unapplySeq(str).isDefined => str
+      case lexical.Keyword(str) if regex.unapplySeq(str).isDefined => str
+    }
+  )
+
+  protected lazy val optionName: Parser[String] = "[_a-zA-Z][a-zA-Z0-9]*".r ^^ {
+    case name => name
+  }
+
+  protected lazy val pair: Parser[(String, String)] =
+    optionName ~ stringLit ^^ { case k ~ v => (k,v) }
 
   protected lazy val column: Parser[StructField] =
     ident ~ dataType ~ (COMMENT ~> stringLit).? ^^ { case columnName ~ typ ~ cm =>
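
A standalone sketch of the technique in this hunk, assuming the standard scala-parser-combinators library (a simplified stand-in, not Spark's actual DDLParser): the overridden regexToParser matches the regex against the text of both Identifier and Keyword tokens, and the Keyword branch is what admits reserved words as option names.

import scala.language.implicitConversions
import scala.util.matching.Regex
import scala.util.parsing.combinator.syntactical.StandardTokenParsers

object OptionNameParser extends StandardTokenParsers {
  // Reserve a word so that `table` lexes as a Keyword token, not an Identifier.
  lexical.reserved += ("table", "options")

  // Same shape as the override in the diff: acceptMatch inspects each token,
  // and the Keyword case is what lets reserved words through.
  implicit def regexToParser(regex: Regex): Parser[String] = acceptMatch(
    s"identifier matching regex $regex", {
      case lexical.Identifier(str) if regex.unapplySeq(str).isDefined => str
      case lexical.Keyword(str) if regex.unapplySeq(str).isDefined => str
    })

  // The same option-name regex the patch uses.
  val optionName: Parser[String] = "[_a-zA-Z][a-zA-Z0-9]*".r

  def parse(input: String): Option[String] =
    phrase(optionName)(new lexical.Scanner(input)) match {
      case Success(name, _) => Some(name)
      case _                => None
    }
}

// Both a plain identifier and a reserved word now parse as option names:
//   OptionNameParser.parse("path")   // Some("path")
//   OptionNameParser.parse("table")  // Some("table")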
@@ -25,17 +25,17 @@ class DDLScanSource extends RelationProvider {
   override def createRelation(
       sqlContext: SQLContext,
       parameters: Map[String, String]): BaseRelation = {
-    SimpleDDLScan(parameters("from").toInt, parameters("TO").toInt)(sqlContext)
+    SimpleDDLScan(parameters("from").toInt, parameters("TO").toInt, parameters("Table"))(sqlContext)
   }
 }
 
-case class SimpleDDLScan(from: Int, to: Int)(@transient val sqlContext: SQLContext)
+case class SimpleDDLScan(from: Int, to: Int, table: String)(@transient val sqlContext: SQLContext)
   extends BaseRelation with TableScan {
 
   override def schema: StructType =
     StructType(Seq(
       StructField("intType", IntegerType, nullable = false,
-        new MetadataBuilder().putString("comment", "test comment").build()),
+        new MetadataBuilder().putString("comment", s"test comment $table").build()),
       StructField("stringType", StringType, nullable = false),
       StructField("dateType", DateType, nullable = false),
       StructField("timestampType", TimestampType, nullable = false),
@@ -73,7 +73,8 @@ class DDLTestSuite extends DataSourceTest {
         |USING org.apache.spark.sql.sources.DDLScanSource
         |OPTIONS (
         |  From '1',
-        |  To '10'
+        |  To '10',
+        |  Table 'test1'
         |)
       """.stripMargin)
   }
@@ -81,7 +82,7 @@ class DDLTestSuite extends DataSourceTest {
   sqlTest(
     "describe ddlPeople",
     Seq(
-      Row("intType", "int", "test comment"),
+      Row("intType", "int", "test comment test1"),
       Row("stringType", "string", ""),
      Row("dateType", "date", ""),
       Row("timestampType", "timestamp", ""),
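
Putting the test changes together, a minimal end-to-end sketch of the behaviour they pin down (assuming a `sqlContext` in scope, as in DataSourceTest): the keyword-named key `Table` is read back inside createRelation via parameters("Table") and surfaces in the column comment reported by DESCRIBE.

import org.apache.spark.sql.Row

sqlContext.sql(
  """
    |CREATE TEMPORARY TABLE ddlPeople
    |USING org.apache.spark.sql.sources.DDLScanSource
    |OPTIONS (From '1', To '10', Table 'test1')
  """.stripMargin)

// Values taken from the expected rows in the test above.
val firstRow = sqlContext.sql("describe ddlPeople").collect().head
assert(firstRow == Row("intType", "int", "test comment test1"))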