From 3a43ae7c0bbce8eda98f50a97a0138f860197a98 Mon Sep 17 00:00:00 2001
From: sueann <sueann@databricks.com>
Date: Fri, 10 Feb 2017 11:50:23 -0800
Subject: [PATCH] [SPARK-18613][ML] make spark.mllib LDA dependencies in
 spark.ml LDA private

## What changes were proposed in this pull request?
The spark.ml *LDAModel classes were exposing the underlying spark.mllib LDA models via protected methods. This change makes those members package (clustering) private.
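For context, a minimal sketch (hypothetical package and class names, not the actual Spark sources) of how a qualified `private[clustering]` modifier narrows visibility compared to `protected`:

```scala
// Hypothetical example only; illustrates the visibility change made in this patch.
package example.ml.clustering

abstract class ExampleModel {
  // Visible anywhere inside the example.ml.clustering package, but excluded from
  // generated Scaladoc and inaccessible to user-defined subclasses outside the package.
  private[clustering] def underlyingModel: AnyRef

  // Visible to (and overridable by) any subclass, including user code, and shown in Scaladoc,
  // so a protected member leaks the spark.mllib types into the public spark.ml API.
  protected def legacyModel: AnyRef
}
```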

## How was this patch tested?
```
build/sbt doc  # "millib.clustering" no longer appears in the docs for *LDA* classes
build/sbt compile  # compiles
build/sbt
> mllib/testOnly   # tests pass
```

Author: sueann <sueann@databricks.com>

Closes #16860 from sueann/SPARK-18613.
---
 .../scala/org/apache/spark/ml/clustering/LDA.scala   | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/mllib/src/main/scala/org/apache/spark/ml/clustering/LDA.scala b/mllib/src/main/scala/org/apache/spark/ml/clustering/LDA.scala
index 03f4ac5b28..bbcef3502d 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/clustering/LDA.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/clustering/LDA.scala
@@ -418,11 +418,11 @@ abstract class LDAModel private[ml] (
    * If this model was produced by EM, then this local representation may be built lazily.
    */
   @Since("1.6.0")
-  protected def oldLocalModel: OldLocalLDAModel
+  private[clustering] def oldLocalModel: OldLocalLDAModel
 
   /** Returns underlying spark.mllib model, which may be local or distributed */
   @Since("1.6.0")
-  protected def getModel: OldLDAModel
+  private[clustering] def getModel: OldLDAModel
 
   private[ml] def getEffectiveDocConcentration: Array[Double] = getModel.docConcentration.toArray
 
@@ -563,7 +563,7 @@ abstract class LDAModel private[ml] (
 class LocalLDAModel private[ml] (
     uid: String,
     vocabSize: Int,
-    @Since("1.6.0") override protected val oldLocalModel: OldLocalLDAModel,
+    @Since("1.6.0") override private[clustering] val oldLocalModel: OldLocalLDAModel,
     sparkSession: SparkSession)
   extends LDAModel(uid, vocabSize, sparkSession) {
 
@@ -573,7 +573,7 @@ class LocalLDAModel private[ml] (
     copyValues(copied, extra).setParent(parent).asInstanceOf[LocalLDAModel]
   }
 
-  override protected def getModel: OldLDAModel = oldLocalModel
+  override private[clustering] def getModel: OldLDAModel = oldLocalModel
 
   @Since("1.6.0")
   override def isDistributed: Boolean = false
@@ -656,14 +656,14 @@ class DistributedLDAModel private[ml] (
     private var oldLocalModelOption: Option[OldLocalLDAModel])
   extends LDAModel(uid, vocabSize, sparkSession) {
 
-  override protected def oldLocalModel: OldLocalLDAModel = {
+  override private[clustering] def oldLocalModel: OldLocalLDAModel = {
     if (oldLocalModelOption.isEmpty) {
       oldLocalModelOption = Some(oldDistributedModel.toLocal)
     }
     oldLocalModelOption.get
   }
 
-  override protected def getModel: OldLDAModel = oldDistributedModel
+  override private[clustering] def getModel: OldLDAModel = oldDistributedModel
 
   /**
    * Convert this distributed model to a local representation.  This discards info about the
-- 
GitLab