diff --git a/sql/core/src/main/scala/org/apache/spark/sql/json/JacksonParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/json/JacksonParser.scala
index f1a66c84fc96afccb6ab8d07af05d05e6b924de6..b8fd3b9cc150e6c8b0d58b92c0658bf7eadc698c 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/json/JacksonParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/json/JacksonParser.scala
@@ -125,7 +125,9 @@ private[sql] object JacksonParser {
         convertMap(factory, parser, kt)
 
       case (_, udt: UserDefinedType[_]) =>
-        udt.deserialize(convertField(factory, parser, udt.sqlType))
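+        // Parse into the UDT's underlying sqlType; the row stores the
+        // internal representation, so do not deserialize to the user class here.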
+        convertField(factory, parser, udt.sqlType)
     }
   }
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/UserDefinedTypeSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/UserDefinedTypeSuite.scala
index f29935224e5b6f4dd49e303dad3a720a2e401143..9181222f6922b34899903fba3f70499c58a308b6 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/UserDefinedTypeSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/UserDefinedTypeSuite.scala
@@ -138,4 +138,26 @@ class UserDefinedTypeSuite extends QueryTest {
     val actual = openHashSetUDT.deserialize(openHashSetUDT.serialize(set))
     assert(actual.iterator.toSet === set.iterator.toSet)
   }
+
+  test("UDTs with JSON") {
+    val data = Seq(
+      "{\"id\":1,\"vec\":[1.1,2.2,3.3,4.4]}",
+      "{\"id\":2,\"vec\":[2.25,4.5,8.75]}"
+    )
+    val schema = StructType(Seq(
+      StructField("id", IntegerType, false),
+      StructField("vec", new MyDenseVectorUDT, false)
+    ))
+
+    val stringRDD = ctx.sparkContext.parallelize(data)
+    val jsonRDD = ctx.read.schema(schema).json(stringRDD)
+    checkAnswer(
+      jsonRDD,
+      Row(1, new MyDenseVector(Array(1.1, 2.2, 3.3, 4.4))) ::
+        Row(2, new MyDenseVector(Array(2.25, 4.5, 8.75))) ::
+        Nil
+    )
+  }
 }