diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
index d4a4c35691bcf94777b1ded471e7bac8e66329b9..f3acb70e037414f99055a9a1edf156c8affa2e57 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
@@ -294,13 +294,10 @@ object OptimizeIn extends Rule[LogicalPlan] {
 
 /**
  * Simplifies boolean expressions:
- *
  * 1. Simplifies expressions whose answer can be determined without evaluating both sides.
  * 2. Eliminates / extracts common factors.
- * 3. Removes `Not` operator.
- *
- * Note that this rule can eliminate expressions that might otherwise have been evaluated and thus
- * is only safe when evaluations of expressions does not result in side effects.
+ * 3. Merges identical expressions.
+ * 4. Removes `Not` operator.
  */
 object BooleanSimplification extends Rule[LogicalPlan] with PredicateHelper {
   def apply(plan: LogicalPlan): LogicalPlan = plan transform {
@@ -311,9 +308,26 @@ object BooleanSimplification extends Rule[LogicalPlan] with PredicateHelper {
           case (l, Literal(true, BooleanType)) => l
           case (Literal(false, BooleanType), _) => Literal(false)
           case (_, Literal(false, BooleanType)) => Literal(false)
-          // a && a && a ... => a
-          case _ if splitConjunctivePredicates(and).distinct.size == 1 => left
-          case _ => and
+          // a && a => a
+          case (l, r) if l fastEquals r => l
+          case (_, _) =>
+            val lhsSet = splitDisjunctivePredicates(left).toSet
+            val rhsSet = splitDisjunctivePredicates(right).toSet
+            val common = lhsSet.intersect(rhsSet)
+            val ldiff = lhsSet.diff(common)
+            val rdiff = rhsSet.diff(common)
+            if (ldiff.isEmpty || rdiff.isEmpty) {
+              // a && (a || b) => a
+              common.reduce(Or)
+            } else {
+              // (a || b || c || ...) && (a || b || d || ...) && (a || b || e || ...) ... =>
+              // (a || b) || ((c || ...) && (d || ...) && (e || ...) && ...)
+              (ldiff.reduceOption(Or) ++ rdiff.reduceOption(Or))
+                .reduceOption(And)
+                .map(_ :: common.toList)
+                .getOrElse(common.toList)
+                .reduce(Or)
+            }
         }
 
       case or @ Or(left, right) =>
@@ -322,19 +336,26 @@ object BooleanSimplification extends Rule[LogicalPlan] with PredicateHelper {
           case (_, Literal(true, BooleanType)) => Literal(true)
           case (Literal(false, BooleanType), r) => r
           case (l, Literal(false, BooleanType)) => l
-          // a || a || a ... => a
-          case _ if splitDisjunctivePredicates(or).distinct.size == 1 => left
-          // (a && b && c && ...) || (a && b && d && ...) => a && b && (c || d || ...)
-          case _ =>
+          // a || a => a
+          case (l, r) if l fastEquals r => l
+          case (_, _) =>
             val lhsSet = splitConjunctivePredicates(left).toSet
             val rhsSet = splitConjunctivePredicates(right).toSet
             val common = lhsSet.intersect(rhsSet)
-
-            (lhsSet.diff(common).reduceOption(And) ++ rhsSet.diff(common).reduceOption(And))
-              .reduceOption(Or)
-              .map(_ :: common.toList)
-              .getOrElse(common.toList)
-              .reduce(And)
+            val ldiff = lhsSet.diff(common)
+            val rdiff = rhsSet.diff(common)
+            if (ldiff.isEmpty || rdiff.isEmpty) {
+              // a || (b && a) => a
+              common.reduce(And)
+            } else {
+              // (a && b && c && ...) || (a && b && d && ...) || (a && b && e && ...) ... =>
+              // a && b && ((c && ...) || (d && ...) || (e && ...) || ...)
+              (ldiff.reduceOption(And) ++ rdiff.reduceOption(And))
+                .reduceOption(Or)
+                .map(_ :: common.toList)
+                .getOrElse(common.toList)
+                .reduce(And)
+            }
         }
 
       case not @ Not(exp) =>
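
Note on the new And/Or branches above: the core idea is to split each side into its conjuncts (or disjuncts), factor out the predicates common to both sides, and re-attach what is left of each side under a single Or (or And). Below is a minimal standalone sketch of the Or-side extraction against a toy expression ADT rather than Catalyst's Expression hierarchy; Sym, AndE, OrE, splitConj and factorOr are hypothetical names used only for illustration.

    // Toy expression tree standing in for Catalyst expressions (illustrative only).
    sealed trait Expr
    case class Sym(name: String) extends Expr
    case class AndE(left: Expr, right: Expr) extends Expr
    case class OrE(left: Expr, right: Expr) extends Expr

    object FactorSketch {
      // Analogue of PredicateHelper.splitConjunctivePredicates: flatten nested ANDs.
      def splitConj(e: Expr): Seq[Expr] = e match {
        case AndE(l, r) => splitConj(l) ++ splitConj(r)
        case other      => Seq(other)
      }

      // (a && b && c) || (a && b && d)  =>  a && b && (c || d)
      def factorOr(left: Expr, right: Expr): Expr = {
        val lhs    = splitConj(left).toSet
        val rhs    = splitConj(right).toSet
        val common = lhs.intersect(rhs)
        val ldiff  = lhs.diff(common)
        val rdiff  = rhs.diff(common)
        if (ldiff.isEmpty || rdiff.isEmpty) {
          // One side subsumes the other, e.g. a || (a && b) => a
          common.reduce(AndE)
        } else {
          // Keep the common factors and OR together the residue of each side.
          val residual = OrE(ldiff.reduce(AndE), rdiff.reduce(AndE))
          (residual +: common.toSeq).reduce(AndE)
        }
      }

      def main(args: Array[String]): Unit = {
        val (a, b, c, d) = (Sym("a"), Sym("b"), Sym("c"), Sym("d"))
        // (a && b && c) || (a && b && d)
        println(factorOr(AndE(AndE(a, b), c), AndE(AndE(a, b), d)))
        // Prints a tree equivalent to a && b && (c || d); operand order may vary.
      }
    }

The And branch in the patch is the exact dual: it splits on disjuncts via splitDisjunctivePredicates, factors the common disjuncts out, joins the residue of each side with And, and re-attaches the common part with Or.
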
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/BooleanSimplificationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/BooleanSimplificationSuite.scala
new file mode 100644
index 0000000000000000000000000000000000000000..a0863dad96eb0afe5cff0a6c3d3988084368b00d
--- /dev/null
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/BooleanSimplificationSuite.scala
@@ -0,0 +1,92 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.optimizer
+
+import org.apache.spark.sql.catalyst.analysis.EliminateAnalysisOperators
+import org.apache.spark.sql.catalyst.expressions.{Literal, Expression}
+import org.apache.spark.sql.catalyst.plans.logical._
+import org.apache.spark.sql.catalyst.plans.PlanTest
+import org.apache.spark.sql.catalyst.rules._
+import org.apache.spark.sql.catalyst.dsl.plans._
+import org.apache.spark.sql.catalyst.dsl.expressions._
+
+class BooleanSimplificationSuite extends PlanTest {
+
+  object Optimize extends RuleExecutor[LogicalPlan] {
+    val batches =
+      Batch("AnalysisNodes", Once,
+        EliminateAnalysisOperators) ::
+      Batch("Constant Folding", FixedPoint(50),
+        NullPropagation,
+        ConstantFolding,
+        BooleanSimplification,
+        SimplifyFilters) :: Nil
+  }
+
+  val testRelation = LocalRelation('a.int, 'b.int, 'c.int, 'd.string)
+
+  def checkCondition(originCondition: Expression, optimizedCondition: Expression): Unit = {
+    val originQuery = testRelation.where(originCondition).analyze
+    val optimized = Optimize(originQuery)
+    val expected = testRelation.where(optimizedCondition).analyze
+    comparePlans(optimized, expected)
+  }
+
+  test("a && a => a") {
+    checkCondition(Literal(1) < 'a && Literal(1) < 'a, Literal(1) < 'a)
+    checkCondition(Literal(1) < 'a && Literal(1) < 'a && Literal(1) < 'a, Literal(1) < 'a)
+  }
+
+  test("a || a => a") {
+    checkCondition(Literal(1) < 'a || Literal(1) < 'a, Literal(1) < 'a)
+    checkCondition(Literal(1) < 'a || Literal(1) < 'a || Literal(1) < 'a, Literal(1) < 'a)
+  }
+
+  test("(a && b && c && ...) || (a && b && d && ...) || (a && b && e && ...) ...") {
+    checkCondition('b > 3 || 'c > 5, 'b > 3 || 'c > 5)
+
+    checkCondition(('a < 2 && 'a > 3 && 'b > 5) || 'a < 2, 'a < 2)
+
+    checkCondition('a < 2 || ('a < 2 && 'a > 3 && 'b > 5), 'a < 2)
+
+    val input = ('a === 'b && 'b > 3 && 'c > 2) ||
+      ('a === 'b && 'c < 1 && 'a === 5) ||
+      ('a === 'b && 'b < 5 && 'a > 1)
+
+    val expected =
+      (((('b > 3) && ('c > 2)) ||
+        (('c < 1) && ('a === 5))) ||
+        (('b < 5) && ('a > 1))) && ('a === 'b)
+    checkCondition(input, expected)
+
+  }
+
+  test("(a || b || c || ...) && (a || b || d || ...) && (a || b || e || ...) ...") {
+    checkCondition('b > 3 && 'c > 5, 'b > 3 && 'c > 5)
+
+    checkCondition(('a < 2 || 'a > 3 || 'b > 5) && 'a < 2, 'a < 2)
+
+    checkCondition('a < 2 && ('a < 2 || 'a > 3 || 'b > 5), 'a < 2)
+
+    checkCondition(('a < 2 || 'b > 3) && ('a < 2 || 'c > 5), ('b > 3 && 'c > 5) || 'a < 2)
+
+    val input: Expression = ('a === 'b || 'b > 3) && ('a === 'b || 'a > 3) && ('a === 'b || 'a < 5)
+    val expected: Expression = ('b > 3 && 'a > 3 && 'a < 5) || 'a === 'b
+    checkCondition(input, expected)
+  }
+}
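
As a reading aid for the third case of the "(a && b && c && ...) || ..." test above (a derivation sketch, not part of the patch): the factoring is applied to the left-associated Or tree one node at a time, innermost first, pulling 'a === 'b out at each step:

    ('a === 'b && 'b > 3 && 'c > 2) || ('a === 'b && 'c < 1 && 'a === 5)
      =>  (('b > 3 && 'c > 2) || ('c < 1 && 'a === 5)) && 'a === 'b

    ... || ('a === 'b && 'b < 5 && 'a > 1)
      =>  ((('b > 3 && 'c > 2) || ('c < 1 && 'a === 5)) || ('b < 5 && 'a > 1)) && 'a === 'b

which is exactly the shape of the expected expression asserted in that test.
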
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/NormalizeFiltersSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/NormalizeFiltersSuite.scala
deleted file mode 100644
index 906300d8336cb9059af713d1c186b05c9ce55874..0000000000000000000000000000000000000000
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/NormalizeFiltersSuite.scala
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.sql.catalyst.optimizer
-
-import org.apache.spark.sql.catalyst.analysis.EliminateAnalysisOperators
-import org.apache.spark.sql.catalyst.expressions.{And, Expression, Or}
-import org.apache.spark.sql.catalyst.plans.PlanTest
-import org.apache.spark.sql.catalyst.plans.logical.{Filter, LocalRelation, LogicalPlan}
-import org.apache.spark.sql.catalyst.rules.RuleExecutor
-
-// For implicit conversions
-import org.apache.spark.sql.catalyst.dsl.expressions._
-import org.apache.spark.sql.catalyst.dsl.plans._
-
-class NormalizeFiltersSuite extends PlanTest {
-  object Optimize extends RuleExecutor[LogicalPlan] {
-    val batches = Seq(
-      Batch("AnalysisNodes", Once,
-        EliminateAnalysisOperators),
-      Batch("NormalizeFilters", FixedPoint(100),
-        BooleanSimplification,
-        SimplifyFilters))
-  }
-
-  val relation = LocalRelation('a.int, 'b.int, 'c.string)
-
-  def checkExpression(original: Expression, expected: Expression): Unit = {
-    val actual = Optimize(relation.where(original)).collect { case f: Filter => f.condition }.head
-    val result = (actual, expected) match {
-      case (And(l1, r1), And(l2, r2)) => (l1 == l2 && r1 == r2) || (l1 == r2 && l2 == r1)
-      case (Or (l1, r1), Or (l2, r2)) => (l1 == l2 && r1 == r2) || (l1 == r2 && l2 == r1)
-      case (lhs, rhs) => lhs fastEquals rhs
-    }
-
-    assert(result, s"$actual isn't equivalent to $expected")
-  }
-
-  test("a && a => a") {
-    checkExpression('a === 1 && 'a === 1, 'a === 1)
-    checkExpression('a === 1 && 'a === 1 && 'a === 1, 'a === 1)
-  }
-
-  test("a || a => a") {
-    checkExpression('a === 1 || 'a === 1, 'a === 1)
-    checkExpression('a === 1 || 'a === 1 || 'a === 1, 'a === 1)
-  }
-
-  test("(a && b) || (a && c) => a && (b || c)") {
-    checkExpression(
-      ('a === 1 && 'a < 10) || ('a > 2 && 'a === 1),
-      ('a === 1) && ('a < 10 || 'a > 2))
-
-    checkExpression(
-      ('a < 1 && 'b > 2 && 'c.isNull) || ('a < 1 && 'c === "hello" && 'b > 2),
-      ('c.isNull || 'c === "hello") && 'a < 1 && 'b > 2)
-  }
-}