diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala
index e9bfa09b7dff8865e62783becfeb1362ec2bd273..d31164fe94821caca17d1e2b8f3dee6ef28d8499 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala
@@ -39,29 +39,42 @@ abstract class QueryPlan[PlanType <: QueryPlan[PlanType]] extends TreeNode[PlanT
   }
 
   /**
-   * Infers a set of `isNotNull` constraints from a given set of equality/comparison expressions.
-   * For e.g., if an expression is of the form (`a > 5`), this returns a constraint of the form
-   * `isNotNull(a)`
+   * Infers a set of `isNotNull` constraints from a given set of equality/comparison expressions
+   * as well as non-nullable attributes. For example, if an expression is of the form (`a > 5`),
+   * this returns a constraint of the form `isNotNull(a)`.
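+   * Similarly, if an attribute `b` in the operator's output is non-nullable, this returns a
+   * constraint of the form `isNotNull(b)`.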
    */
   private def constructIsNotNullConstraints(constraints: Set[Expression]): Set[Expression] = {
-    // Currently we only propagate constraints if the condition consists of equality
-    // and ranges. For all other cases, we return an empty set of constraints
-    constraints.map {
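+    // Accumulator for all `IsNotNull` constraints inferred from the input expressions and from
+    // the operator's non-nullable output attributes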
+    var isNotNullConstraints = Set.empty[Expression]
+
+    // First, we infer `IsNotNull` constraints from conditions that consist of equality and
+    // ranges. For all other cases, no constraints are inferred
+    constraints.foreach {
       case EqualTo(l, r) =>
-        Set(IsNotNull(l), IsNotNull(r))
+        isNotNullConstraints ++= Set(IsNotNull(l), IsNotNull(r))
       case GreaterThan(l, r) =>
-        Set(IsNotNull(l), IsNotNull(r))
+        isNotNullConstraints ++= Set(IsNotNull(l), IsNotNull(r))
       case GreaterThanOrEqual(l, r) =>
-        Set(IsNotNull(l), IsNotNull(r))
+        isNotNullConstraints ++= Set(IsNotNull(l), IsNotNull(r))
       case LessThan(l, r) =>
-        Set(IsNotNull(l), IsNotNull(r))
+        isNotNullConstraints ++= Set(IsNotNull(l), IsNotNull(r))
       case LessThanOrEqual(l, r) =>
-        Set(IsNotNull(l), IsNotNull(r))
+        isNotNullConstraints ++= Set(IsNotNull(l), IsNotNull(r))
       case Not(EqualTo(l, r)) =>
-        Set(IsNotNull(l), IsNotNull(r))
-      case _ =>
-        Set.empty[Expression]
-    }.foldLeft(Set.empty[Expression])(_ union _.toSet)
+        isNotNullConstraints ++= Set(IsNotNull(l), IsNotNull(r))
+      case _ => // No inference
+    }
+
+    // Second, we infer additional constraints from non-nullable attributes that are part of the
+    // operator's output
+    val nonNullableAttributes = output.filterNot(_.nullable)
+    isNotNullConstraints ++= nonNullableAttributes.map(IsNotNull).toSet
+
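+    // Only return constraints that are not already part of the input set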
+    isNotNullConstraints -- constraints
   }
 
   /**
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/ConstraintPropagationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/ConstraintPropagationSuite.scala
index f3ab026192f094c1bdde5642a3ad4fbf9ad810d5..e5063599a353ed78a476ce9cb88d57ad6b7f9190 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/ConstraintPropagationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/ConstraintPropagationSuite.scala
@@ -23,6 +23,7 @@ import org.apache.spark.sql.catalyst.dsl.expressions._
 import org.apache.spark.sql.catalyst.dsl.plans._
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.plans.logical._
+import org.apache.spark.sql.types.{IntegerType, StringType}
 
 class ConstraintPropagationSuite extends SparkFunSuite {
 
@@ -217,4 +218,14 @@ class ConstraintPropagationSuite extends SparkFunSuite {
         IsNotNull(resolveColumn(tr, "a")),
         IsNotNull(resolveColumn(tr, "b")))))
   }
+
+  test("infer IsNotNull constraints from non-nullable attributes") {
+    val tr = LocalRelation('a.int, AttributeReference("b", IntegerType, nullable = false)(),
+      AttributeReference("c", StringType, nullable = false)())
+
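+    // 'a is nullable by default, so only the explicitly non-nullable attributes b and c should
+    // yield IsNotNull constraints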
+    verifyConstraints(tr.analyze.constraints,
+      ExpressionSet(Seq(IsNotNull(resolveColumn(tr, "b")), IsNotNull(resolveColumn(tr, "c")))))
+  }
 }
diff --git a/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala b/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala
index 8bd731dda22d2152131c007fc58401dec61d6d55..650797f7683e124627d32aa9fe07a9c82698be18 100644
--- a/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala
+++ b/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala
@@ -341,9 +341,6 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
     "udf_round_3",
     "view_cast",
 
-    // enable this after fixing SPARK-14137
-    "union20",
-
     // These tests check the VIEW table definition, but Spark handles CREATE VIEW itself and
     // generates different View Expanded Text.
     "alter_view_as_select",
@@ -1046,6 +1043,7 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
     "union18",
     "union19",
     "union2",
+    "union20",
     "union22",
     "union23",
     "union24",