diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/ForeachSink.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/ForeachSink.scala
index de09fb568d2a651afab9ad9eab85ca85068cc26d..2cc54107f8b8392960f4094166848583c15c503e 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/ForeachSink.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/ForeachSink.scala
@@ -63,4 +63,7 @@ class ForeachSink[T : Encoder](writer: ForeachWriter[T]) extends Sink with Seria
       }
     }
   }
+
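+  // Human-readable name for this sink, used wherever it is printed (e.g. in logs).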
+  override def toString(): String = "ForeachSink"
 }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/console.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/console.scala
index 3baea6376069fb2a9d064959cf2838bd8c09fa5a..1c9284e252bd67465ff1d4c8e5a60a9647ba39e3 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/console.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/console.scala
@@ -52,6 +52,9 @@ class ConsoleSink(options: Map[String, String]) extends Sink with Logging {
       data.sparkSession.sparkContext.parallelize(data.collect()), data.schema)
       .show(numRowsToShow, isTruncated)
   }
+
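+  // Report the configured row limit and truncation setting when the sink is printed.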
+  override def toString(): String = s"ConsoleSink[numRows=$numRowsToShow, truncate=$isTruncated]"
 }
 
 case class ConsoleRelation(override val sqlContext: SQLContext, data: DataFrame)
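
A minimal sketch of the effect, for illustration only (ConsoleSink lives in an
internal package; the "numRows" and "truncate" option names are the ones the
class already parses):

  import org.apache.spark.sql.execution.streaming.ConsoleSink

  // toString only formats the parsed options, so no SparkSession is needed here.
  val sink = new ConsoleSink(Map("numRows" -> "5", "truncate" -> "false"))
  println(sink)  // prints: ConsoleSink[numRows=5, truncate=false]

Previously both classes inherited Object.toString, so printing a sink produced
something like "org.apache.spark.sql.execution.streaming.ConsoleSink@3f8e2a1c";
the overrides give each sink a stable, descriptive identity instead.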