diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
index bd9823bc054241639ea5f50f3fc4deb7e58372a4..5a169488c97eb8a736809137fcd24d9461672dcb 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
@@ -265,8 +265,15 @@ final class Decimal extends Ordered[Decimal] with Serializable {
 
   def * (that: Decimal): Decimal = Decimal(toBigDecimal * that.toBigDecimal)
 
-  def / (that: Decimal): Decimal =
-    if (that.isZero) null else Decimal(toBigDecimal / that.toBigDecimal)
+  def / (that: Decimal): Decimal = {
+    if (that.isZero) {
+      null
+    } else {
+      // To avoid the non-terminating decimal expansion problem (e.g. 1 / 3), use Java
+      // BigDecimal's divide with an explicit ROUNDING_MODE, which rounds to this value's scale.
+      Decimal(toJavaBigDecimal.divide(that.toJavaBigDecimal, ROUNDING_MODE.id))
+    }
+  }
 
   def % (that: Decimal): Decimal =
     if (that.isZero) null else Decimal(toBigDecimal % that.toBigDecimal)
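
Why the change is needed: java.math.BigDecimal.divide(divisor) demands an exact
quotient and throws ArithmeticException ("Non-terminating decimal expansion; no
exact representable decimal result") when the expansion does not terminate, as
with 1 / 3. divide(divisor, roundingMode) instead rounds the quotient to the
dividend's scale. A minimal sketch of that behavior, runnable outside Spark
(object and value names here are illustrative only):

    import java.math.BigDecimal

    object DivideSketch {
      def main(args: Array[String]): Unit = {
        val one   = new BigDecimal("1.000")   // scale 3, like Decimal(1.0, 10, 3)
        val three = new BigDecimal("3.000")

        // Exact division of 1 by 3 has no terminating decimal expansion,
        // so the single-argument divide throws ArithmeticException.
        try {
          one.divide(three)
        } catch {
          case e: ArithmeticException => println(s"plain divide: ${e.getMessage}")
        }

        // Division with a rounding mode keeps the dividend's scale (3) and
        // rounds. Assuming Decimal.ROUNDING_MODE is RoundingMode.HALF_UP, its
        // .id is 4, the same value as java.math.BigDecimal.ROUND_HALF_UP.
        println(one.divide(three, BigDecimal.ROUND_HALF_UP))   // prints 0.333
      }
    }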
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/decimal/DecimalSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/decimal/DecimalSuite.scala
index ccc29c0dc8c3520fe0579dfb8c6d01f2ebf28e5d..5f312964e5bf715aa7149fc500aefba5d88db5a0 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/decimal/DecimalSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/decimal/DecimalSuite.scala
@@ -167,4 +167,9 @@ class DecimalSuite extends SparkFunSuite with PrivateMethodTester {
     val decimal = (Decimal(Long.MaxValue, 38, 0) * Decimal(Long.MaxValue, 38, 0)).toJavaBigDecimal
     assert(decimal.unscaledValue.toString === "85070591730234615847396907784232501249")
   }
+
+  test("fix non-terminating decimal expansion problem") {
+    val decimal = Decimal(1.0, 10, 3) / Decimal(3.0, 10, 3)
+    assert(decimal.toString === "0.333")
+  }
 }
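
Because the quotient is rounded (HALF_UP) rather than truncated at the dividend's
scale, a further assertion along the same lines (hypothetical, not part of this
patch) would pin down the rounding direction:

    // 2.000 / 3.000 = 0.666..., which HALF_UP rounds to 0.667 at scale 3.
    assert((Decimal(2.0, 10, 3) / Decimal(3.0, 10, 3)).toString === "0.667")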