diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
index 7ff8795d4f05ea3984d128e42486f90866fff2ad..77d40a5079cbb010fb32ad1cfbd7c43f18d813eb 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
@@ -361,116 +361,6 @@ case class Remainder(left: Expression, right: Expression)
   }
 }
 
-case class MaxOf(left: Expression, right: Expression)
-  extends BinaryArithmetic with NonSQLExpression {
-
-  // TODO: Remove MaxOf and MinOf, and replace its usage with Greatest and Least.
-
-  override def inputType: AbstractDataType = TypeCollection.Ordered
-
-  override def nullable: Boolean = left.nullable && right.nullable
-
-  private lazy val ordering = TypeUtils.getInterpretedOrdering(dataType)
-
-  override def eval(input: InternalRow): Any = {
-    val input1 = left.eval(input)
-    val input2 = right.eval(input)
-    if (input1 == null) {
-      input2
-    } else if (input2 == null) {
-      input1
-    } else {
-      if (ordering.compare(input1, input2) < 0) {
-        input2
-      } else {
-        input1
-      }
-    }
-  }
-
-  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
-    val eval1 = left.genCode(ctx)
-    val eval2 = right.genCode(ctx)
-    val compCode = ctx.genComp(dataType, eval1.value, eval2.value)
-
-    ev.copy(code = eval1.code + eval2.code + s"""
-      boolean ${ev.isNull} = false;
-      ${ctx.javaType(left.dataType)} ${ev.value} =
-        ${ctx.defaultValue(left.dataType)};
-
-      if (${eval1.isNull}) {
-        ${ev.isNull} = ${eval2.isNull};
-        ${ev.value} = ${eval2.value};
-      } else if (${eval2.isNull}) {
-        ${ev.isNull} = ${eval1.isNull};
-        ${ev.value} = ${eval1.value};
-      } else {
-        if ($compCode > 0) {
-          ${ev.value} = ${eval1.value};
-        } else {
-          ${ev.value} = ${eval2.value};
-        }
-      }""")
-  }
-
-  override def symbol: String = "max"
-}
-
-case class MinOf(left: Expression, right: Expression)
-  extends BinaryArithmetic with NonSQLExpression {
-
-  // TODO: Remove MaxOf and MinOf, and replace its usage with Greatest and Least.
-
-  override def inputType: AbstractDataType = TypeCollection.Ordered
-
-  override def nullable: Boolean = left.nullable && right.nullable
-
-  private lazy val ordering = TypeUtils.getInterpretedOrdering(dataType)
-
-  override def eval(input: InternalRow): Any = {
-    val input1 = left.eval(input)
-    val input2 = right.eval(input)
-    if (input1 == null) {
-      input2
-    } else if (input2 == null) {
-      input1
-    } else {
-      if (ordering.compare(input1, input2) < 0) {
-        input1
-      } else {
-        input2
-      }
-    }
-  }
-
-  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
-    val eval1 = left.genCode(ctx)
-    val eval2 = right.genCode(ctx)
-    val compCode = ctx.genComp(dataType, eval1.value, eval2.value)
-
-    ev.copy(code = eval1.code + eval2.code + s"""
-      boolean ${ev.isNull} = false;
-      ${ctx.javaType(left.dataType)} ${ev.value} =
-        ${ctx.defaultValue(left.dataType)};
-
-      if (${eval1.isNull}) {
-        ${ev.isNull} = ${eval2.isNull};
-        ${ev.value} = ${eval2.value};
-      } else if (${eval2.isNull}) {
-        ${ev.isNull} = ${eval1.isNull};
-        ${ev.value} = ${eval1.value};
-      } else {
-        if ($compCode < 0) {
-          ${ev.value} = ${eval1.value};
-        } else {
-          ${ev.value} = ${eval2.value};
-        }
-      }""")
-  }
-
-  override def symbol: String = "min"
-}
-
 @ExpressionDescription(
   usage = "_FUNC_(a, b) - Returns the positive modulo",
   extended = "> SELECT _FUNC_(10,3);\n 1")
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
index fe328fd598d7180d2d3a1679c8a608d77657ac44..75130007b963a79cf4030a857fdc37746037280d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
@@ -662,10 +662,6 @@ object NullPropagation extends Rule[LogicalPlan] {
       case e @ Substring(_, Literal(null, _), _) => Literal.create(null, e.dataType)
       case e @ Substring(_, _, Literal(null, _)) => Literal.create(null, e.dataType)
 
-      // MaxOf and MinOf can't do null propagation
-      case e: MaxOf => e
-      case e: MinOf => e
-
       // Put exceptional cases above if any
       case e @ BinaryArithmetic(Literal(null, _), _) => Literal.create(null, e.dataType)
       case e @ BinaryArithmetic(_, Literal(null, _)) => Literal.create(null, e.dataType)
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala
index 76e42d9afa4c30c3d4d4bd765d93a2b7a58b2b35..35f75697b72d7faf53c18d13802503a317d21cd1 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala
@@ -78,8 +78,6 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite {
     assertErrorForDifferingTypes(BitwiseAnd('intField, 'booleanField))
     assertErrorForDifferingTypes(BitwiseOr('intField, 'booleanField))
     assertErrorForDifferingTypes(BitwiseXor('intField, 'booleanField))
-    assertErrorForDifferingTypes(MaxOf('intField, 'booleanField))
-    assertErrorForDifferingTypes(MinOf('intField, 'booleanField))
 
     assertError(Add('booleanField, 'booleanField), "requires (numeric or calendarinterval) type")
     assertError(Subtract('booleanField, 'booleanField),
@@ -91,11 +89,6 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite {
     assertError(BitwiseAnd('booleanField, 'booleanField), "requires integral type")
     assertError(BitwiseOr('booleanField, 'booleanField), "requires integral type")
     assertError(BitwiseXor('booleanField, 'booleanField), "requires integral type")
-
-    assertError(MaxOf('mapField, 'mapField),
-      s"requires ${TypeCollection.Ordered.simpleString} type")
-    assertError(MinOf('mapField, 'mapField),
-      s"requires ${TypeCollection.Ordered.simpleString} type")
   }
 
   test("check types for predicates") {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala
index 2e37887fbc82201d2065cc60e81a2d11f45cbc1e..321d820b70f4b72a3306380e5f351d67ea8ec8ca 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala
@@ -194,56 +194,6 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper
     }
   }
 
-  test("MaxOf basic") {
-    testNumericDataTypes { convert =>
-      val small = Literal(convert(1))
-      val large = Literal(convert(2))
-      checkEvaluation(MaxOf(small, large), convert(2))
-      checkEvaluation(MaxOf(large, small), convert(2))
-      checkEvaluation(MaxOf(Literal.create(null, small.dataType), large), convert(2))
-      checkEvaluation(MaxOf(large, Literal.create(null, small.dataType)), convert(2))
-    }
-    checkEvaluation(MaxOf(positiveShortLit, negativeShortLit), (positiveShort).toShort)
-    checkEvaluation(MaxOf(positiveIntLit, negativeIntLit), positiveInt)
-    checkEvaluation(MaxOf(positiveLongLit, negativeLongLit), positiveLong)
-
-    DataTypeTestUtils.ordered.foreach { tpe =>
-      checkConsistencyBetweenInterpretedAndCodegen(MaxOf, tpe, tpe)
-    }
-  }
-
-  test("MaxOf for atomic type") {
-    checkEvaluation(MaxOf(true, false), true)
-    checkEvaluation(MaxOf("abc", "bcd"), "bcd")
-    checkEvaluation(MaxOf(Array(1.toByte, 2.toByte), Array(1.toByte, 3.toByte)),
-      Array(1.toByte, 3.toByte))
-  }
-
-  test("MinOf basic") {
-    testNumericDataTypes { convert =>
-      val small = Literal(convert(1))
-      val large = Literal(convert(2))
-      checkEvaluation(MinOf(small, large), convert(1))
-      checkEvaluation(MinOf(large, small), convert(1))
-      checkEvaluation(MinOf(Literal.create(null, small.dataType), large), convert(2))
-      checkEvaluation(MinOf(small, Literal.create(null, small.dataType)), convert(1))
-    }
-    checkEvaluation(MinOf(positiveShortLit, negativeShortLit), (negativeShort).toShort)
-    checkEvaluation(MinOf(positiveIntLit, negativeIntLit), negativeInt)
-    checkEvaluation(MinOf(positiveLongLit, negativeLongLit), negativeLong)
-
-    DataTypeTestUtils.ordered.foreach { tpe =>
-      checkConsistencyBetweenInterpretedAndCodegen(MinOf, tpe, tpe)
-    }
-  }
-
-  test("MinOf for atomic type") {
-    checkEvaluation(MinOf(true, false), false)
-    checkEvaluation(MinOf("abc", "bcd"), "abc")
-    checkEvaluation(MinOf(Array(1.toByte, 2.toByte), Array(1.toByte, 3.toByte)),
-      Array(1.toByte, 2.toByte))
-  }
-
   test("pmod") {
     testNumericDataTypes { convert =>
       val left = Literal(convert(7))
@@ -261,8 +211,4 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper
     checkEvaluation(Pmod(positiveInt, negativeInt), positiveInt)
     checkEvaluation(Pmod(positiveLong, negativeLong), positiveLong)
   }
-
-  DataTypeTestUtils.numericTypeWithoutDecimal.foreach { tpe =>
-    checkConsistencyBetweenInterpretedAndCodegen(MinOf, tpe, tpe)
-  }
 }
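
Follow-up note: the TODO removed above ("Remove MaxOf and MinOf, and replace its usage with Greatest and Least") points callers at the existing Catalyst Greatest and Least expressions as the replacements. A minimal sketch of equivalent usage, assuming the Greatest/Least expressions and Literal.apply already present in the Catalyst tree (nothing below is introduced by this patch):

import org.apache.spark.sql.catalyst.expressions.{Greatest, Least, Literal}

// Greatest/Least take a Seq of two or more children and, like the removed
// MaxOf/MinOf, skip null inputs, returning null only when every child is null.
val max = Greatest(Seq(Literal(1), Literal(2)))
val min = Least(Seq(Literal(1), Literal(2)))

// Expression.eval defaults its input row to null, so literal-only trees can be
// evaluated directly.
max.eval()  // 2
min.eval()  // 1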