diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala
index 04ac4f20c66ecfd4d1321394ade3a0392b4738ba..bdc52c08acb66b45b652ba27be7506fde0c26f16 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala
@@ -78,10 +78,6 @@ private[sql] abstract class AbstractSparkSQLParser
 }
 
 class SqlLexical extends StdLexical {
-  case class FloatLit(chars: String) extends Token {
-    override def toString: String = chars
-  }
-
   case class DecimalLit(chars: String) extends Token {
     override def toString: String = chars
   }
@@ -106,17 +102,16 @@ class SqlLexical extends StdLexical {
   }
 
   override lazy val token: Parser[Token] =
-    ( rep1(digit) ~ ('.' ~> digit.*).? ~ (exp ~> sign.? ~ rep1(digit)) ^^ {
-        case i ~ None ~ (sig ~ rest) =>
-          DecimalLit(i.mkString + "e" + sig.mkString + rest.mkString)
-        case i ~ Some(d) ~ (sig ~ rest) =>
-          DecimalLit(i.mkString + "." + d.mkString + "e" + sig.mkString + rest.mkString)
-      }
+    ( rep1(digit) ~ scientificNotation ^^ { case i ~ s => DecimalLit(i.mkString + s) }
+    | '.' ~> (rep1(digit) ~ scientificNotation) ^^
+      { case i ~ s => DecimalLit("0." + i.mkString + s) }
+    | rep1(digit) ~ ('.' ~> digit.*) ~ scientificNotation ^^
+      { case i1 ~ i2 ~ s => DecimalLit(i1.mkString + "." + i2.mkString + s) }
     | digit.* ~ identChar ~ (identChar | digit).* ^^
       { case first ~ middle ~ rest => processIdent((first ++ (middle :: rest)).mkString) }
     | rep1(digit) ~ ('.' ~> digit.*).? ^^ {
         case i ~ None => NumericLit(i.mkString)
-        case i ~ Some(d) => FloatLit(i.mkString + "." + d.mkString)
+        case i ~ Some(d) => DecimalLit(i.mkString + "." + d.mkString)
       }
     | '\'' ~> chrExcept('\'', '\n', EofCh).* <~ '\'' ^^
       { case chars => StringLit(chars mkString "") }
@@ -133,8 +128,10 @@ class SqlLexical extends StdLexical {
 
   override def identChar: Parser[Elem] = letter | elem('_')
 
-  private lazy val sign: Parser[Elem] = elem("s", c => c == '+' || c == '-')
-  private lazy val exp: Parser[Elem] = elem("e", c => c == 'E' || c == 'e')
+  private lazy val scientificNotation: Parser[String] =
+    (elem('e') | elem('E')) ~> (elem('+') | elem('-')).? ~ rep1(digit) ^^ {
+      case s ~ rest => "e" + s.mkString + rest.mkString
+    }
 
   override def whitespace: Parser[Any] =
     ( whitespaceChar
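Note on the lexer change above: FloatLit is gone, and the factored-out scientificNotation rule feeds three new token alternatives that are tried before the identifier and plain-numeric rules, so an input like 900e-1 lexes as a single DecimalLit rather than a NumericLit followed by an identifier "e". A minimal sketch of the same technique grafted onto a bare StdLexical, for experimenting with the token stream (DemoLexical and tokenize are illustrative names, not part of this patch; only the integer-mantissa alternative is reproduced to keep it short):

    import scala.util.parsing.combinator.lexical.StdLexical
    import scala.util.parsing.input.Reader

    object DemoLexical extends StdLexical {
      case class DecimalLit(chars: String) extends Token {
        override def toString: String = chars
      }

      // Same shape as the patch: 'e'/'E', an optional sign, mandatory
      // exponent digits, normalized to a lowercase 'e'.
      private lazy val scientificNotation: Parser[String] =
        (elem('e') | elem('E')) ~> (elem('+') | elem('-')).? ~ rep1(digit) ^^ {
          case s ~ rest => "e" + s.mkString + rest.mkString
        }

      override lazy val token: Parser[Token] =
        ( rep1(digit) ~ scientificNotation ^^ { case i ~ s => DecimalLit(i.mkString + s) }
        | super.token  // fall back to StdLexical's identifiers, numbers, etc.
        )

      // Walk the Scanner to materialize the token stream.
      def tokenize(input: String): List[Token] = {
        var r: Reader[Token] = new Scanner(input)
        val buf = scala.collection.mutable.ListBuffer.empty[Token]
        while (!r.atEnd) { buf += r.first; r = r.rest }
        buf.toList
      }
    }

    // DemoLexical.tokenize("900e-1") yields List(DecimalLit("900e-1")).
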
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala
index 440e9e28fa783e4079f453f5514c12ec4eee4e01..cd717c09f8e5ef93ea48b05856f1ece90d80d3b6 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala
@@ -334,27 +334,15 @@ object SqlParser extends AbstractSparkSQLParser with DataTypeParser {
 
   protected lazy val numericLiteral: Parser[Literal] =
     ( integral  ^^ { case i => Literal(toNarrowestIntegerType(i)) }
-    | sign.? ~ unsignedFloat ^^ {
-      case s ~ f => Literal(toDecimalOrDouble(s.getOrElse("") + f))
-    }
-    | sign.? ~ unsignedDecimal ^^ {
-      case s ~ d => Literal(toDecimalOrDouble(s.getOrElse("") + d))
-    }
+    | sign.? ~ unsignedFloat ^^
+      { case s ~ f => Literal(toDecimalOrDouble(s.getOrElse("") + f)) }
     )
 
   protected lazy val unsignedFloat: Parser[String] =
     ( "." ~> numericLit ^^ { u => "0." + u }
-    | elem("decimal", _.isInstanceOf[lexical.FloatLit]) ^^ (_.chars)
+    | elem("decimal", _.isInstanceOf[lexical.DecimalLit]) ^^ (_.chars)
     )
 
-  protected lazy val unsignedDecimal: Parser[String] =
-    ( "." ~> decimalLit ^^ { u => "0." + u }
-    | elem("scientific_notation", _.isInstanceOf[lexical.DecimalLit]) ^^ (_.chars)
-    )
-
-  def decimalLit: Parser[String] =
-    elem("scientific_notation", _.isInstanceOf[lexical.DecimalLit]) ^^ (_.chars)
-
   protected lazy val sign: Parser[String] = ("+" | "-")
 
   protected lazy val integral: Parser[String] =
@@ -477,7 +465,7 @@ object SqlParser extends AbstractSparkSQLParser with DataTypeParser {
 
   protected lazy val baseExpression: Parser[Expression] =
     ( "*" ^^^ UnresolvedStar(None)
-    | (ident <~ "."). + <~ "*" ^^ { case target => UnresolvedStar(Option(target))}
+    | rep1(ident <~ ".") <~ "*" ^^ { case target => UnresolvedStar(Option(target)) }
     | primary
    )
 
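With FloatLit and the separate unsignedDecimal/decimalLit productions removed, a single unsignedFloat now accepts every non-integral literal, and any sign is prepended textually before the string reaches toDecimalOrDouble. toDecimalOrDouble itself is outside this diff; the helper below only illustrates the contract the new tests pin down (exponent notation comes back as a Double, which is why Literal(90d) compares equal for "9.0e1"), and is not the Spark implementation:

    import java.math.{BigDecimal => JBigDecimal}

    // Illustration, not Spark code: scientific notation maps to a Double,
    // plain decimal strings stay exact.
    def decimalOrDouble(s: String): Any =
      if (s.exists(c => c == 'e' || c == 'E')) s.toDouble
      else new JBigDecimal(s)

    // decimalOrDouble("-900e-1")  => -90.0 (Double)
    // decimalOrDouble("900.0")    => java.math.BigDecimal 900.0
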
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SqlParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SqlParserSuite.scala
index ea28bfa021bed3106399bb67bfe13dac3a045990..9ff893b84775b6f38aef579dbdb7e161b72d8c45 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SqlParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SqlParserSuite.scala
@@ -126,4 +126,25 @@ class SqlParserSuite extends PlanTest {
     checkSingleUnit("13.123456789", "second")
     checkSingleUnit("-13.123456789", "second")
   }
+
+  test("support scientific notation") {
+    def assertRight(input: String, output: Double): Unit = {
+      val parsed = SqlParser.parse("SELECT " + input)
+      val expected = Project(
+        UnresolvedAlias(
+          Literal(output)
+        ) :: Nil,
+        OneRowRelation)
+      comparePlans(parsed, expected)
+    }
+
+    assertRight("9.0e1", 90)
+    assertRight(".9e+2", 90)
+    assertRight("0.9e+2", 90)
+    assertRight("900e-1", 90)
+    assertRight("900.0E-1", 90)
+    assertRight("9.e+1", 90)
+
+    intercept[RuntimeException](SqlParser.parse("SELECT .e3"))
+  }
 }
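
For an end-to-end check outside the suite, the same literals can be exercised from a Spark 1.x spark-shell (sqlContext is the shell-provided SQLContext; the printed Row is indicative, not verbatim output):

    // All three spellings lex to DecimalLit and evaluate to the Double 90.0.
    sqlContext.sql("SELECT 9.0e1, .9e+2, 900E-1").first()
    // => [90.0,90.0,90.0]

    // The failure case from the suite: no mantissa digits, so parsing fails.
    sqlContext.sql("SELECT .e3")  // throws RuntimeException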