diff --git a/sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala
index 319de710fbc3e185aa341b8c9d4c82b251154f09..2e861b84b71336d01f65889a7a0d220d607df1f1 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala
@@ -18,6 +18,7 @@
 package org.apache.spark.sql.sources
 
 import scala.language.existentials
 import scala.language.implicitConversions
+import scala.util.matching.Regex
 
 import org.apache.spark.Logging
@@ -155,7 +156,21 @@ private[sql] class DDLParser(
 
   protected lazy val className: Parser[String] = repsep(ident, ".") ^^ { case s => s.mkString(".")}
 
-  protected lazy val pair: Parser[(String, String)] = ident ~ stringLit ^^ { case k ~ v => (k,v) }
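+  // Match regexes against both identifier and keyword tokens, so that option
+  // names which collide with SQL keywords (e.g. `table`) can still be parsed.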
+  override implicit def regexToParser(regex: Regex): Parser[String] = acceptMatch(
+    s"identifier matching regex ${regex}", {
+      case lexical.Identifier(str) if regex.unapplySeq(str).isDefined => str
+      case lexical.Keyword(str) if regex.unapplySeq(str).isDefined => str
+    }
+  )
+
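+  // An option name is a letter or underscore followed by any number of
+  // letters, digits, or underscores.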
+  protected lazy val optionName: Parser[String] = "[_a-zA-Z][_a-zA-Z0-9]*".r
+
+  protected lazy val pair: Parser[(String, String)] =
+    optionName ~ stringLit ^^ { case k ~ v => (k, v) }
 
   protected lazy val column: Parser[StructField] =
     ident ~ dataType ~ (COMMENT ~> stringLit).?  ^^ { case columnName ~ typ ~ cm =>
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/DDLTestSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/DDLTestSuite.scala
index 3f24a497390c1849fbc45c81ea5b4afddd51e3be..ca25751b9583d25e4d84f7b371bf25bef7be47b5 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/DDLTestSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/DDLTestSuite.scala
@@ -25,17 +25,20 @@ class DDLScanSource extends RelationProvider {
   override def createRelation(
       sqlContext: SQLContext,
       parameters: Map[String, String]): BaseRelation = {
-    SimpleDDLScan(parameters("from").toInt, parameters("TO").toInt)(sqlContext)
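+    // Option keys are case-insensitive ("from" vs. From, "TO" vs. To); "Table"
+    // additionally exercises an option name that is also a SQL keyword.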
+    SimpleDDLScan(parameters("from").toInt, parameters("TO").toInt, parameters("Table"))(sqlContext)
   }
 }
 
-case class SimpleDDLScan(from: Int, to: Int)(@transient val sqlContext: SQLContext)
+case class SimpleDDLScan(from: Int, to: Int, table: String)(@transient val sqlContext: SQLContext)
   extends BaseRelation with TableScan {
 
   override def schema: StructType =
     StructType(Seq(
       StructField("intType", IntegerType, nullable = false,
-        new MetadataBuilder().putString("comment", "test comment").build()),
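+        // Surface the Table option in the column comment so DESCRIBE can verify it.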
+        new MetadataBuilder().putString("comment", s"test comment $table").build()),
       StructField("stringType", StringType, nullable = false),
       StructField("dateType", DateType, nullable = false),
       StructField("timestampType", TimestampType, nullable = false),
@@ -73,7 +76,8 @@ class DDLTestSuite extends DataSourceTest {
           |USING org.apache.spark.sql.sources.DDLScanSource
           |OPTIONS (
           |  From '1',
-          |  To '10'
+          |  To '10',
+          |  Table 'test1'
           |)
           """.stripMargin)
   }
@@ -81,7 +85,7 @@
   sqlTest(
       "describe ddlPeople",
       Seq(
-        Row("intType", "int", "test comment"),
+        Row("intType", "int", "test comment test1"),
         Row("stringType", "string", ""),
         Row("dateType", "date", ""),
         Row("timestampType", "timestamp", ""),