diff --git a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
index fa4b8c4868f613607f4bea698f48fe475462f021..64f68c9e9e30271b5011ed3ce7cc5ad78354ec07 100644
--- a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
+++ b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
@@ -107,7 +107,7 @@ statement
     | SHOW TBLPROPERTIES table=tableIdentifier
         ('(' key=tablePropertyKey ')')?                                #showTblProperties
     | SHOW FUNCTIONS (LIKE? (qualifiedName | pattern=STRING))?         #showFunctions
-    | (DESC | DESCRIBE) FUNCTION EXTENDED? qualifiedName               #describeFunction
+    | (DESC | DESCRIBE) FUNCTION EXTENDED? describeFuncName            #describeFunction
     | (DESC | DESCRIBE) option=(EXTENDED | FORMATTED)?
         tableIdentifier partitionSpec? describeColName?                #describeTable
     | (DESC | DESCRIBE) DATABASE EXTENDED? identifier                  #describeDatabase
@@ -220,6 +220,14 @@ partitionVal
     : identifier (EQ constant)?
     ;
 
+describeFuncName
+    : qualifiedName
+    | STRING
+    | comparisonOperator
+    | arithmeticOperator
+    | predicateOperator
+    ;
+
 describeColName
     : identifier ('.' (identifier | STRING))*
     ;
@@ -519,6 +527,14 @@ comparisonOperator
     : EQ | NEQ | NEQJ | LT | LTE | GT | GTE | NSEQ
     ;
 
+arithmeticOperator
+    : PLUS | MINUS | ASTERISK | SLASH | PERCENT | DIV | TILDE | AMPERSAND | PIPE | HAT
+    ;
+
+predicateOperator
+    : OR | AND | IN | NOT
+    ;
+
 booleanValue
     : TRUE | FALSE
     ;
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
index dd3577063f3c2e3808e9b05af4785ba5405c830f..1bada2ce67ea2446710124ed082b8dda7bd3355d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
@@ -363,6 +363,7 @@ object FunctionRegistry {
     expression[Not]("not"),
     expression[Or]("or"),
 
+    // comparison operators
     expression[EqualNullSafe]("<=>"),
     expression[EqualTo]("="),
     expression[EqualTo]("=="),
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index 3d7b888d722dc3ede1d2dc28be82490488de7f9b..7f98c21af21f6f02aea509e1f1f1a310e9ff0731 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -112,8 +112,18 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
    * Create a plan for a DESCRIBE FUNCTION command.
    */
   override def visitDescribeFunction(ctx: DescribeFunctionContext): LogicalPlan = withOrigin(ctx) {
-    val functionName = ctx.qualifiedName().identifier().asScala.map(_.getText).mkString(".")
-    DescribeFunction(functionName, ctx.EXTENDED != null)
+    import ctx._
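+    // The function name may be a (possibly qualified) identifier, a quoted
+    // string literal, or an operator token such as '<' or '+'.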
+    val functionName =
+      if (describeFuncName.STRING() != null) {
+        string(describeFuncName.STRING())
+      } else if (describeFuncName.qualifiedName() != null) {
+        describeFuncName.qualifiedName().identifier().asScala.map(_.getText).mkString(".")
+      } else {
+        describeFuncName.getText
+      }
+    DescribeFunction(functionName, EXTENDED != null)
   }
 
   /**
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index 9ffffa0bdd6e7e1b7a99a4e43a38f3523e4f5cea..e601ff1e358aa982f6fea2c980536331e5ced91a 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -716,4 +716,56 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
     }
   }
 
+  test("describe function") {
+    checkAnswer(
+      sql("DESCRIBE FUNCTION log"),
+      Row("Class: org.apache.spark.sql.catalyst.expressions.Logarithm") ::
+        Row("Function: log") ::
+        Row("Usage: log(b, x) - Returns the logarithm of x with base b.") :: Nil
+    )
+    // predicate operator
+    checkAnswer(
+      sql("DESCRIBE FUNCTION or"),
+      Row("Class: org.apache.spark.sql.catalyst.expressions.Or") ::
+        Row("Function: or") ::
+        Row("Usage: a or b - Logical OR.") :: Nil
+    )
+    checkAnswer(
+      sql("DESCRIBE FUNCTION !"),
+      Row("Class: org.apache.spark.sql.catalyst.expressions.Not") ::
+        Row("Function: !") ::
+        Row("Usage: ! a - Logical not") :: Nil
+    )
+    // arithmetic operators
+    checkAnswer(
+      sql("DESCRIBE FUNCTION +"),
+      Row("Class: org.apache.spark.sql.catalyst.expressions.Add") ::
+        Row("Function: +") ::
+        Row("Usage: a + b - Returns a+b.") :: Nil
+    )
+    // comparison operators
+    checkAnswer(
+      sql("DESCRIBE FUNCTION <"),
+      Row("Class: org.apache.spark.sql.catalyst.expressions.LessThan") ::
+        Row("Function: <") ::
+        Row("Usage: a < b - Returns TRUE if a is less than b.") :: Nil
+    )
+    // STRING
+    checkAnswer(
+      sql("DESCRIBE FUNCTION 'concat'"),
+      Row("Class: org.apache.spark.sql.catalyst.expressions.Concat") ::
+        Row("Function: concat") ::
+        Row("Usage: concat(str1, str2, ..., strN) " +
+          "- Returns the concatenation of str1, str2, ..., strN") :: Nil
+    )
+    // extended mode
+    checkAnswer(
+      sql("DESCRIBE FUNCTION EXTENDED ^"),
+      Row("Class: org.apache.spark.sql.catalyst.expressions.BitwiseXor") ::
+        Row("Extended Usage:\n> SELECT 3 ^ 5; 2") ::
+        Row("Function: ^") ::
+        Row("Usage: a ^ b - Bitwise exclusive OR.") :: Nil
+    )
+  }
 }