diff --git a/examples/src/main/scala/org/apache/spark/examples/ml/LinearRegressionWithElasticNetExample.scala b/examples/src/main/scala/org/apache/spark/examples/ml/LinearRegressionWithElasticNetExample.scala
index c7352b3e7ab9c0086ee6bb1acf2cdbcd8ce84097..f68aef708201c67d60e18a8fa63845128f1cb9b6 100644
--- a/examples/src/main/scala/org/apache/spark/examples/ml/LinearRegressionWithElasticNetExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/ml/LinearRegressionWithElasticNetExample.scala
@@ -29,11 +29,11 @@ object LinearRegressionWithElasticNetExample {
   def main(args: Array[String]): Unit = {
     val conf = new SparkConf().setAppName("LinearRegressionWithElasticNetExample")
     val sc = new SparkContext(conf)
-    val sqlCtx = new SQLContext(sc)
+    val sqlContext = new SQLContext(sc)
 
     // $example on$
     // Load training data
-    val training = sqlCtx.read.format("libsvm")
+    val training = sqlContext.read.format("libsvm")
       .load("data/mllib/sample_linear_regression_data.txt")
 
     val lr = new LinearRegression()
diff --git a/examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionSummaryExample.scala b/examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionSummaryExample.scala
index 04c60c0c1d0676dce166a3c77eb51fac31900dbd..89c5edf1ace9c72213a37160226e8654511e6b98 100644
--- a/examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionSummaryExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionSummaryExample.scala
@@ -30,11 +30,11 @@ object LogisticRegressionSummaryExample {
   def main(args: Array[String]): Unit = {
     val conf = new SparkConf().setAppName("LogisticRegressionSummaryExample")
     val sc = new SparkContext(conf)
-    val sqlCtx = new SQLContext(sc)
-    import sqlCtx.implicits._
+    val sqlContext = new SQLContext(sc)
+    import sqlContext.implicits._
 
     // Load training data
-    val training = sqlCtx.read.format("libsvm").load("data/mllib/sample_libsvm_data.txt")
+    val training = sqlContext.read.format("libsvm").load("data/mllib/sample_libsvm_data.txt")
 
     val lr = new LogisticRegression()
       .setMaxIter(10)
diff --git a/examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionWithElasticNetExample.scala b/examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionWithElasticNetExample.scala
index f632960f26ae51a46404d44513bb58c32c4ac9e9..6e27571f1dc1640340fd9f5f35744517e161affe 100644
--- a/examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionWithElasticNetExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionWithElasticNetExample.scala
@@ -29,11 +29,11 @@ object LogisticRegressionWithElasticNetExample {
   def main(args: Array[String]): Unit = {
     val conf = new SparkConf().setAppName("LogisticRegressionWithElasticNetExample")
     val sc = new SparkContext(conf)
-    val sqlCtx = new SQLContext(sc)
+    val sqlContext = new SQLContext(sc)
 
     // $example on$
     // Load training data
-    val training = sqlCtx.read.format("libsvm").load("data/mllib/sample_libsvm_data.txt")
+    val training = sqlContext.read.format("libsvm").load("data/mllib/sample_libsvm_data.txt")
 
     val lr = new LogisticRegression()
       .setMaxIter(10)