diff --git a/external/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/DirectKafkaInputDStream.scala b/external/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/DirectKafkaInputDStream.scala
index 794f53c5abfd04e7137bd9e2612172eb586c2b39..6d6983c4bd419a732c69c85985f58fbe8bee7a8b 100644
--- a/external/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/DirectKafkaInputDStream.scala
+++ b/external/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/DirectKafkaInputDStream.scala
@@ -42,15 +42,12 @@ import org.apache.spark.streaming.scheduler.rate.RateEstimator
  * The spark configuration spark.streaming.kafka.maxRatePerPartition gives the maximum number
  *  of messages
  * per second that each '''partition''' will accept.
- * @param locationStrategy In most cases, pass in [[PreferConsistent]],
+ * @param locationStrategy In most cases, pass in [[LocationStrategies.PreferConsistent]],
  *   see [[LocationStrategy]] for more details.
- * @param executorKafkaParams Kafka
- * <a href="http://kafka.apache.org/documentation.html#newconsumerconfigs">
- * configuration parameters</a>.
- *   Requires  "bootstrap.servers" to be set with Kafka broker(s),
- *   NOT zookeeper servers, specified in host1:port1,host2:port2 form.
- * @param consumerStrategy In most cases, pass in [[Subscribe]],
+ * @param consumerStrategy In most cases, pass in [[ConsumerStrategies.Subscribe]],
  *   see [[ConsumerStrategy]] for more details
+ * @param ppc configuration of settings such as max rate on a per-partition basis.
+ *   see [[PerPartitionConfig]] for more details.
  * @tparam K type of Kafka message key
  * @tparam V type of Kafka message value
  */
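For context on the parameters this doc change touches, here is a minimal sketch of how they fit together in user code. Broker addresses, topic names, and the consumer group are placeholders, and the stream is built through `KafkaUtils.createDirectStream` rather than by constructing `DirectKafkaInputDStream` directly:

```scala
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.kafka010.KafkaUtils
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe

val conf = new SparkConf().setAppName("direct-stream-sketch").setMaster("local[2]")
val ssc = new StreamingContext(conf, Seconds(5))

// "bootstrap.servers" must list Kafka brokers, not ZooKeeper nodes.
val kafkaParams = Map[String, Object](
  "bootstrap.servers" -> "host1:9092,host2:9092",
  "key.deserializer" -> classOf[StringDeserializer],
  "value.deserializer" -> classOf[StringDeserializer],
  "group.id" -> "example-group",
  "enable.auto.commit" -> (false: java.lang.Boolean)
)

// PreferConsistent distributes partitions evenly over executors;
// Subscribe keeps the consumer on the named topics across rebalances.
val stream = KafkaUtils.createDirectStream[String, String](
  ssc,
  PreferConsistent,
  Subscribe[String, String](Seq("topicA", "topicB"), kafkaParams)
)

stream.map(record => (record.key, record.value)).print()

ssc.start()
ssc.awaitTermination()
```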
diff --git a/external/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/KafkaRDD.scala b/external/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/KafkaRDD.scala
index 98394251bb2335671562dbc0af2de9dbc34756cc..8f3809520811793814641688b5156bcc043f060f 100644
--- a/external/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/KafkaRDD.scala
+++ b/external/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/KafkaRDD.scala
@@ -41,8 +41,8 @@ import org.apache.spark.storage.StorageLevel
  * with Kafka broker(s) specified in host1:port1,host2:port2 form.
  * @param offsetRanges offset ranges that define the Kafka data belonging to this RDD
  * @param preferredHosts map from TopicPartition to preferred host for processing that partition.
- * In most cases, use [[DirectKafkaInputDStream.preferConsistent]]
- * Use [[DirectKafkaInputDStream.preferBrokers]] if your executors are on same nodes as brokers.
+ * In most cases, use [[LocationStrategies.PreferConsistent]]
+ * Use [[LocationStrategies.PreferBrokers]] if your executors are on same nodes as brokers.
  * @param useConsumerCache whether to use a consumer from a per-jvm cache
  * @tparam K type of Kafka message key
  * @tparam V type of Kafka message value
diff --git a/external/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/KafkaUtils.scala b/external/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/KafkaUtils.scala
index c11917f59d5b8e287f58dc3eae269a25aa5bcc7c..37046329e56174e95841c0741de9a4365d96e696 100644
--- a/external/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/KafkaUtils.scala
+++ b/external/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/KafkaUtils.scala
@@ -48,7 +48,7 @@ object KafkaUtils extends Logging {
    * configuration parameters</a>. Requires "bootstrap.servers" to be set
    * with Kafka broker(s) specified in host1:port1,host2:port2 form.
    * @param offsetRanges offset ranges that define the Kafka data belonging to this RDD
-   * @param locationStrategy In most cases, pass in LocationStrategies.preferConsistent,
+   * @param locationStrategy In most cases, pass in [[LocationStrategies.PreferConsistent]],
    *   see [[LocationStrategies]] for more details.
    * @tparam K type of Kafka message key
    * @tparam V type of Kafka message value
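A short sketch of the batch-oriented `createRDD` path this hunk documents; the topic, partitions, and offsets are made up, and `kafkaParams` uses `java.util` types because the Scala overload takes a `ju.Map`:

```scala
import java.{util => ju}

import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.kafka010.{KafkaUtils, LocationStrategies, OffsetRange}

val sc = new SparkContext(new SparkConf().setAppName("create-rdd-sketch").setMaster("local[2]"))

// The Scala createRDD overload takes a java.util.Map for kafkaParams.
val kafkaParams = new ju.HashMap[String, Object]()
kafkaParams.put("bootstrap.servers", "host1:9092,host2:9092")
kafkaParams.put("key.deserializer", classOf[StringDeserializer])
kafkaParams.put("value.deserializer", classOf[StringDeserializer])
kafkaParams.put("group.id", "example-group")

// Each OffsetRange pins one topic/partition to an exact [fromOffset, untilOffset)
// slice, which is what makes the resulting batch RDD deterministic.
val offsetRanges = Array(
  OffsetRange("topicA", 0, 0L, 100L), // topic, partition, fromOffset, untilOffset
  OffsetRange("topicA", 1, 0L, 100L)
)

val rdd = KafkaUtils.createRDD[String, String](
  sc, kafkaParams, offsetRanges, LocationStrategies.PreferConsistent)

rdd.map(_.value).take(10).foreach(println)
```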
@@ -80,14 +80,12 @@ object KafkaUtils extends Logging {
    * Java constructor for a batch-oriented interface for consuming from Kafka.
    * Starting and ending offsets are specified in advance,
    * so that you can control exactly-once semantics.
-   * @param keyClass Class of the keys in the Kafka records
-   * @param valueClass Class of the values in the Kafka records
    * @param kafkaParams Kafka
    * <a href="http://kafka.apache.org/documentation.html#newconsumerconfigs">
    * configuration parameters</a>. Requires "bootstrap.servers" to be set
    * with Kafka broker(s) specified in host1:port1,host2:port2 form.
    * @param offsetRanges offset ranges that define the Kafka data belonging to this RDD
-   * @param locationStrategy In most cases, pass in LocationStrategies.preferConsistent,
+   * @param locationStrategy In most cases, pass in [[LocationStrategies.PreferConsistent]],
    *   see [[LocationStrategies]] for more details.
    * @tparam K type of Kafka message key
    * @tparam V type of Kafka message value
@@ -110,9 +108,9 @@ object KafkaUtils extends Logging {
    * The spark configuration spark.streaming.kafka.maxRatePerPartition gives the maximum number
    *  of messages
    * per second that each '''partition''' will accept.
-   * @param locationStrategy In most cases, pass in LocationStrategies.preferConsistent,
+   * @param locationStrategy In most cases, pass in [[LocationStrategies.PreferConsistent]],
    *   see [[LocationStrategies]] for more details.
-   * @param consumerStrategy In most cases, pass in ConsumerStrategies.subscribe,
+   * @param consumerStrategy In most cases, pass in [[ConsumerStrategies.Subscribe]],
    *   see [[ConsumerStrategies]] for more details
    * @tparam K type of Kafka message key
    * @tparam V type of Kafka message value
@@ -131,9 +129,9 @@ object KafkaUtils extends Logging {
    * :: Experimental ::
    * Scala constructor for a DStream where
    * each given Kafka topic/partition corresponds to an RDD partition.
-   * @param locationStrategy In most cases, pass in LocationStrategies.preferConsistent,
+   * @param locationStrategy In most cases, pass in [[LocationStrategies.PreferConsistent]],
    *   see [[LocationStrategies]] for more details.
-   * @param consumerStrategy In most cases, pass in ConsumerStrategies.subscribe,
+   * @param consumerStrategy In most cases, pass in [[ConsumerStrategies.Subscribe]],
    *   see [[ConsumerStrategies]] for more details.
    * @param perPartitionConfig configuration of settings such as max rate on a per-partition basis.
    *   see [[PerPartitionConfig]] for more details.
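Since the `perPartitionConfig` parameter is the newer addition here, a hedged sketch of what a user-supplied `PerPartitionConfig` might look like; `SkewAwareRateConfig` and its rate numbers are hypothetical:

```scala
import org.apache.kafka.common.TopicPartition
import org.apache.spark.streaming.kafka010.PerPartitionConfig

// Hypothetical policy: cap the (assumed) high-volume partition 0 of "topicA"
// at a lower per-second rate than everything else.
class SkewAwareRateConfig extends PerPartitionConfig {
  override def maxRatePerPartition(topicPartition: TopicPartition): Long =
    if (topicPartition.topic == "topicA" && topicPartition.partition == 0) 500L else 2000L
}

// Passed as the last argument of the overload documented above, e.g.
// KafkaUtils.createDirectStream[String, String](
//   ssc, LocationStrategies.PreferConsistent,
//   ConsumerStrategies.Subscribe[String, String](topics, kafkaParams),
//   new SkewAwareRateConfig)
```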
@@ -154,11 +152,9 @@ object KafkaUtils extends Logging {
    * :: Experimental ::
    * Java constructor for a DStream where
    * each given Kafka topic/partition corresponds to an RDD partition.
-   * @param keyClass Class of the keys in the Kafka records
-   * @param valueClass Class of the values in the Kafka records
-   * @param locationStrategy In most cases, pass in LocationStrategies.preferConsistent,
+   * @param locationStrategy In most cases, pass in [[LocationStrategies.PreferConsistent]],
    *   see [[LocationStrategies]] for more details.
-   * @param consumerStrategy In most cases, pass in ConsumerStrategies.subscribe,
+   * @param consumerStrategy In most cases, pass in [[ConsumerStrategies.Subscribe]],
    *   see [[ConsumerStrategies]] for more details
    * @tparam K type of Kafka message key
    * @tparam V type of Kafka message value
@@ -178,11 +174,9 @@ object KafkaUtils extends Logging {
    * :: Experimental ::
    * Java constructor for a DStream where
    * each given Kafka topic/partition corresponds to an RDD partition.
-   * @param keyClass Class of the keys in the Kafka records
-   * @param valueClass Class of the values in the Kafka records
-   * @param locationStrategy In most cases, pass in LocationStrategies.preferConsistent,
+   * @param locationStrategy In most cases, pass in [[LocationStrategies.PreferConsistent]],
    *   see [[LocationStrategies]] for more details.
-   * @param consumerStrategy In most cases, pass in ConsumerStrategies.subscribe,
+   * @param consumerStrategy In most cases, pass in [[ConsumerStrategies.Subscribe]],
    *   see [[ConsumerStrategies]] for more details
    * @param perPartitionConfig configuration of settings such as max rate on a per-partition basis.
    *   see [[PerPartitionConfig]] for more details.