diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala
index 0205c13aa986d6da39268f0bc23b5dd3aeaac609..ca0000a465c97828f3f3e283cf6c27b9e900da3c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala
@@ -37,8 +37,9 @@ import org.apache.spark.util.Utils
  * For a [[StructType]] object, one or multiple [[StructField]]s can be extracted by names.
  * If multiple [[StructField]]s are extracted, a [[StructType]] object will be returned.
- * If a provided name does not have a matching field, it will be ignored. For the case
- * of extracting a single StructField, a `null` will be returned.
- * Example:
+ * If a provided name does not have a matching field, an `IllegalArgumentException` will be
+ * thrown, whether a single [[StructField]] or multiple [[StructField]]s are extracted.
+ *
+ * Scala Example:
  * {{{
  * import org.apache.spark.sql._
  * import org.apache.spark.sql.types._
@@ -53,28 +54,30 @@ import org.apache.spark.util.Utils
  * val singleField = struct("b")
  * // singleField: StructField = StructField(b,LongType,false)
  *
- * // This struct does not have a field called "d". null will be returned.
- * val nonExisting = struct("d")
- * // nonExisting: StructField = null
+ * // This struct does not have a field called "d". An exception will be thrown.
+ * struct("d")
+ * // java.lang.IllegalArgumentException: Field "d" does not exist.
+ * //   ...
  *
  * // Extract multiple StructFields. Field names are provided in a set.
  * // A StructType object will be returned.
  * val twoFields = struct(Set("b", "c"))
  * // twoFields: StructType =
- * //   StructType(List(StructField(b,LongType,false), StructField(c,BooleanType,false)))
+ * //   StructType(StructField(b,LongType,false), StructField(c,BooleanType,false))
  *
- * // Any names without matching fields will be ignored.
- * // For the case shown below, "d" will be ignored and
- * // it is treated as struct(Set("b", "c")).
- * val ignoreNonExisting = struct(Set("b", "c", "d"))
- * // ignoreNonExisting: StructType =
- * //   StructType(List(StructField(b,LongType,false), StructField(c,BooleanType,false)))
+ * // Any names without matching fields will cause an exception to be thrown.
+ * // For the case shown below, an exception is thrown because "d" does not exist.
+ * struct(Set("b", "c", "d"))
+ * // java.lang.IllegalArgumentException: Field "d" does not exist.
+ * //   ...
  * }}}
  *
- * A [[org.apache.spark.sql.Row]] object is used as a value of the StructType.
- * Example:
+ * A [[org.apache.spark.sql.Row]] object is used as a value of the [[StructType]].
+ *
+ * Scala Example:
  * {{{
  * import org.apache.spark.sql._
+ * import org.apache.spark.sql.types._
  *
  * val innerStruct =
  *   StructType(
@@ -87,7 +90,6 @@ import org.apache.spark.util.Utils
  *
  * // Create a Row with the schema defined by struct
  * val row = Row(Row(1, 2, true))
- * // row: Row = [[1,2,true]]
  * }}}
  *
  * @since 1.3.0
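
For reference, a minimal standalone sketch of the extraction behavior the updated scaladoc describes, using the same field names as the example above. The object name, the `Try` wrapper, and the simplified nested schema are illustrative assumptions, not part of the patched file:

import scala.util.Try

import org.apache.spark.sql.Row
import org.apache.spark.sql.types._

object StructTypeDocSketch {
  def main(args: Array[String]): Unit = {
    // Same struct as in the scaladoc example.
    val struct = StructType(
      StructField("a", IntegerType, nullable = true) ::
      StructField("b", LongType, nullable = false) ::
      StructField("c", BooleanType, nullable = false) :: Nil)

    // Extracting an existing field by name returns the StructField.
    val singleField: StructField = struct("b")

    // Extracting a missing field throws IllegalArgumentException (as documented above);
    // wrapping the call in Try is one way to probe for a field safely.
    val missing: Try[StructField] = Try(struct("d"))

    // Extracting several fields by a Set of names returns a StructType.
    val twoFields: StructType = struct(Set("b", "c"))

    // A Row is used as a value of a StructType: nested structs map to nested Rows.
    val nested = StructType(StructField("struct",
      StructType(StructField("f1", IntegerType) :: Nil)) :: Nil)
    val row = Row(Row(1))

    println(Seq(singleField, missing, twoFields, nested, row).mkString("\n"))
  }
}

Run against the sql/catalyst classes, this prints the extracted field, the failed Try for "d", and the two-field StructType, mirroring the outputs quoted in the scaladoc.
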
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala
index 9a7111aa3b8b035157d30f122eaa6bf12f87c285..badccae16837bc0d306aebd18c59986ff40ecdda 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala
@@ -97,7 +97,7 @@ class HiveAnalysis(session: SparkSession) extends Rule[LogicalPlan] {
 }
 
 /**
- * Replaces [[SimpleCatalogRelation]] with [[MetastoreRelation]] if its table provider is hive.
+ * Replaces `SimpleCatalogRelation` with [[MetastoreRelation]] if the table's provider is `hive`.
  */
 class FindHiveSerdeTable(session: SparkSession) extends Rule[LogicalPlan] {
   override def apply(plan: LogicalPlan): LogicalPlan = plan transform {
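
As context for the doc fix above: FindHiveSerdeTable follows Catalyst's standard Rule[LogicalPlan] shape, i.e. pattern-match nodes via `transform` and return replacements. Below is a minimal sketch of that shape only; the rule name and the rewrite (dropping a Filter whose condition is the literal `true`) are hypothetical stand-ins, since the actual SimpleCatalogRelation-to-MetastoreRelation conversion relies on Hive-internal classes:

import org.apache.spark.sql.catalyst.expressions.Literal
import org.apache.spark.sql.catalyst.plans.logical.{Filter, LogicalPlan}
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.types.BooleanType

// Hypothetical rule, illustrating the Rule[LogicalPlan] pattern used by FindHiveSerdeTable:
// walk the plan with `transform`, match a node, and return its replacement.
object DropTrivialFilters extends Rule[LogicalPlan] {
  override def apply(plan: LogicalPlan): LogicalPlan = plan transform {
    // A Filter whose condition is the literal `true` contributes nothing, so replace it
    // with its child, much as FindHiveSerdeTable swaps one relation node for another.
    case Filter(Literal(true, BooleanType), child) => child
  }
}
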