Commit 6bf9352f authored by Cheng Lian

[MINOR] [SQL] Fixes variable name typo

Review on Reviewable: https://reviewable.io/reviews/apache/spark/6038

Author: Cheng Lian <lian@databricks.com>

Closes #6038 from liancheng/fix-typo and squashes the following commits:

572c2a4 [Cheng Lian] Fixes variable name typo
parent d7a37bca
@@ -26,7 +26,7 @@ import org.apache.spark.util.Utils
 class CreateTableAsSelectSuite extends DataSourceTest with BeforeAndAfterAll {
-  import caseInsensisitiveContext._
+  import caseInsensitiveContext._
   var path: File = null
@@ -64,7 +64,7 @@ case class SimpleDDLScan(from: Int, to: Int, table: String)(@transient val sqlCo
 }
 class DDLTestSuite extends DataSourceTest {
-  import caseInsensisitiveContext._
+  import caseInsensitiveContext._
   before {
     sql(
@@ -24,7 +24,7 @@ import org.scalatest.BeforeAndAfter
 abstract class DataSourceTest extends QueryTest with BeforeAndAfter {
   // We want to test some edge cases.
-  implicit val caseInsensisitiveContext = new SQLContext(TestSQLContext.sparkContext)
-  caseInsensisitiveContext.setConf(SQLConf.CASE_SENSITIVE, "false")
+  implicit val caseInsensitiveContext = new SQLContext(TestSQLContext.sparkContext)
+  caseInsensitiveContext.setConf(SQLConf.CASE_SENSITIVE, "false")
 }
@@ -97,7 +97,7 @@ object FiltersPushed {
 class FilteredScanSuite extends DataSourceTest {
-  import caseInsensisitiveContext._
+  import caseInsensitiveContext._
   before {
     sql(
@@ -26,7 +26,7 @@ import org.apache.spark.util.Utils
 class InsertSuite extends DataSourceTest with BeforeAndAfterAll {
-  import caseInsensisitiveContext._
+  import caseInsensitiveContext._
   var path: File = null
@@ -52,7 +52,7 @@ case class SimplePrunedScan(from: Int, to: Int)(@transient val sqlContext: SQLCo
 }
 class PrunedScanSuite extends DataSourceTest {
-  import caseInsensisitiveContext._
+  import caseInsensitiveContext._
   before {
     sql(
@@ -27,7 +27,7 @@ import org.apache.spark.util.Utils
 class SaveLoadSuite extends DataSourceTest with BeforeAndAfterAll {
-  import caseInsensisitiveContext._
+  import caseInsensitiveContext._
   var originalDefaultSource: String = null
@@ -88,7 +88,7 @@ case class AllDataTypesScan(
 }
 class TableScanSuite extends DataSourceTest {
-  import caseInsensisitiveContext._
+  import caseInsensitiveContext._
   var tableWithSchemaExpected = (1 to 10).map { i =>
     Row(
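
For context, the renamed identifier is the shared test SQLContext defined in DataSourceTest (third hunk above), which every other suite in this diff pulls in via import caseInsensitiveContext._. The following is a minimal sketch of the corrected base class assembled from the context lines shown in the diff; the import statements are assumptions inferred from the names referenced (QueryTest, SQLConf, SQLContext, TestSQLContext, BeforeAndAfter), not lines taken from the commit.

import org.scalatest.BeforeAndAfter

import org.apache.spark.sql.{QueryTest, SQLConf, SQLContext}
import org.apache.spark.sql.test.TestSQLContext

abstract class DataSourceTest extends QueryTest with BeforeAndAfter {
  // We want to test some edge cases: a dedicated context that resolves
  // identifiers case-insensitively, shared implicitly by the suites.
  implicit val caseInsensitiveContext = new SQLContext(TestSQLContext.sparkContext)

  caseInsensitiveContext.setConf(SQLConf.CASE_SENSITIVE, "false")
}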