diff --git a/mllib/src/main/scala/org/apache/spark/ml/feature/MinMaxScaler.scala b/mllib/src/main/scala/org/apache/spark/ml/feature/MinMaxScaler.scala
index 9f3d2ca6db0c1eb0f29f564402abcb4cb61e3c9f..28cbe1cb01e9ae069da9782a59d8547f61065315 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/feature/MinMaxScaler.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/feature/MinMaxScaler.scala
@@ -186,8 +186,11 @@ class MinMaxScalerModel private[ml] (
       val size = values.length
       var i = 0
       while (i < size) {
-        val raw = if (originalRange(i) != 0) (values(i) - minArray(i)) / originalRange(i) else 0.5
-        values(i) = raw * scale + $(min)
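+        // Leave NaN entries untouched; without this check, a NaN in a
+        // zero-range column would fall into the raw = 0.5 branch below
+        // and come out as a real number.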
+        if (!values(i).isNaN) {
+          val raw = if (originalRange(i) != 0) (values(i) - minArray(i)) / originalRange(i) else 0.5
+          values(i) = raw * scale + $(min)
+        }
         i += 1
       }
       Vectors.dense(values)
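
For reference, the loop above implements x_scaled = (x - E_min) / (E_max - E_min) * (max - min) + min, and now skips NaN entries explicitly: when originalRange(i) != 0 a NaN already propagates through the arithmetic, but in a constant (zero-range) column it would previously hit the raw = 0.5 branch and come out as a real number. Below is a minimal standalone sketch of the patched behavior outside Spark; the object name, parameter names, and explicit per-column stats are illustrative, not part of the patch:

```scala
// Minimal sketch of the patched rescaling loop, outside Spark.
object MinMaxNaNSketch {
  // Rescales values in place, mirroring MinMaxScalerModel's while loop:
  // NaN entries are left untouched, and a zero-width column maps to the
  // midpoint (raw = 0.5) of the target range.
  def rescale(
      values: Array[Double],
      colMin: Array[Double],
      colRange: Array[Double],
      targetMin: Double,
      targetMax: Double): Array[Double] = {
    val scale = targetMax - targetMin
    var i = 0
    while (i < values.length) {
      if (!values(i).isNaN) {
        val raw =
          if (colRange(i) != 0) (values(i) - colMin(i)) / colRange(i) else 0.5
        values(i) = raw * scale + targetMin
      }
      i += 1
    }
    values
  }

  def main(args: Array[String]): Unit = {
    // Per-column stats taken from the test data in the suite below:
    // mins (1, 2, 0, 1), ranges (5, 0, 2, 2); first data row as input.
    val row = rescale(
      Array(1.0, Double.NaN, 2.0, 2.0),
      Array(1.0, 2.0, 0.0, 1.0),
      Array(5.0, 0.0, 2.0, 2.0),
      targetMin = -5.0, targetMax = 5.0)
    println(row.mkString(", ")) // prints: -5.0, NaN, 5.0, 0.0 (the NaN survives)
  }
}
```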
diff --git a/mllib/src/test/scala/org/apache/spark/ml/feature/MinMaxScalerSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/feature/MinMaxScalerSuite.scala
index 5da84711758c667b23c5ea438e40db37782f698b..9f376b70035c59edceeb23bb68048ee8c3be1008 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/feature/MinMaxScalerSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/feature/MinMaxScalerSuite.scala
@@ -90,4 +90,32 @@ class MinMaxScalerSuite extends SparkFunSuite with MLlibTestSparkContext with De
     assert(newInstance.originalMin === instance.originalMin)
     assert(newInstance.originalMax === instance.originalMax)
   }
+
+  test("MinMaxScaler should remain NaN value") {
+    val data = Array(
+      Vectors.dense(1, Double.NaN, 2.0, 2.0),
+      Vectors.dense(2, 2.0, 0.0, 3.0),
+      Vectors.dense(3, Double.NaN, 0.0, 1.0),
+      Vectors.dense(6, 2.0, 2.0, Double.NaN))
+
+    val expected: Array[Vector] = Array(
+      Vectors.dense(-5.0, Double.NaN, 5.0, 0.0),
+      Vectors.dense(-3.0, 0.0, -5.0, 5.0),
+      Vectors.dense(-1.0, Double.NaN, -5.0, -5.0),
+      Vectors.dense(5.0, 0.0, 5.0, Double.NaN))
+
+    val df = spark.createDataFrame(data.zip(expected)).toDF("features", "expected")
+    val scaler = new MinMaxScaler()
+      .setInputCol("features")
+      .setOutputCol("scaled")
+      .setMin(-5)
+      .setMax(5)
+
+    val model = scaler.fit(df)
+    model.transform(df).select("expected", "scaled").collect()
+      .foreach { case Row(vector1: Vector, vector2: Vector) =>
+        assert(vector1.equals(vector2), "Transformed vector differs from the expected vector.")
+      }
+  }
 }
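
As a sanity check on the hard-coded expected values, the following hypothetical snippet (not part of the patch) derives column 0's outputs from the same formula; that column's observed min and max are 1 and 6:

```scala
// Hypothetical spot-check of the expected values for column 0.
object ExpectedColumnCheck {
  def main(args: Array[String]): Unit = {
    val (eMin, eMax) = (1.0, 6.0)   // observed min/max of column 0: values 1, 2, 3, 6
    val (tMin, tMax) = (-5.0, 5.0)  // setMin(-5) / setMax(5)
    val scaled = Array(1.0, 2.0, 3.0, 6.0)
      .map(x => (x - eMin) / (eMax - eMin) * (tMax - tMin) + tMin)
    println(scaled.mkString(", ")) // prints: -5.0, -3.0, -1.0, 5.0
  }
}
```

The remaining columns follow the same arithmetic; per the expected values, column 1's non-NaN entries land on 0.0 because that column's observed range is zero, which the model maps to the midpoint of the target range (raw = 0.5).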