Commit 9baac56c authored by Reynold Xin
parent 5c78be7a
@@ -83,17 +83,17 @@ private[sql] class DataFrameImpl protected[sql](
   protected[sql] def resolve(colName: String): NamedExpression = {
     queryExecution.analyzed.resolve(colName, sqlContext.analyzer.resolver).getOrElse {
-      throw new RuntimeException(
+      throw new AnalysisException(
         s"""Cannot resolve column name "$colName" among (${schema.fieldNames.mkString(", ")})""")
     }
   }

-  protected[sql] def numericColumns(): Seq[Expression] = {
+  protected[sql] def numericColumns: Seq[Expression] = {
     schema.fields.filter(_.dataType.isInstanceOf[NumericType]).map { n =>
       queryExecution.analyzed.resolve(n.name, sqlContext.analyzer.resolver).get
     }
   }

   override def toDF(colNames: String*): DataFrame = {
     require(schema.size == colNames.size,
       "The number of columns doesn't match.\n" +
...
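A minimal usage sketch (not part of the commit) of what this first hunk means for callers: resolving a column name that does not exist now surfaces as org.apache.spark.sql.AnalysisException rather than a plain RuntimeException, so it can be caught together with other analysis errors. The sketch assumes an existing sqlContext; the DataFrame and column names are hypothetical.

import org.apache.spark.sql.AnalysisException

// Hypothetical DataFrame with columns "name" and "age" (assumes a working sqlContext).
val df = sqlContext.createDataFrame(Seq(("Alice", 30), ("Bob", 25))).toDF("name", "age")

try {
  df("nonexistent")  // column resolution fails
} catch {
  case e: AnalysisException =>
    // e.g. Cannot resolve column name "nonexistent" among (name, age)
    println(e.getMessage)
}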
@@ -17,8 +17,8 @@
 package org.apache.spark.sql

-import scala.language.implicitConversions
 import scala.collection.JavaConversions._
+import scala.language.implicitConversions

 import org.apache.spark.sql.catalyst.analysis.Star
 import org.apache.spark.sql.catalyst.expressions._
@@ -26,7 +26,6 @@ import org.apache.spark.sql.catalyst.plans.logical.Aggregate
 import org.apache.spark.sql.types.NumericType

 /**
  * A set of methods for aggregations on a [[DataFrame]], created by [[DataFrame.groupBy]].
  */
@@ -48,13 +47,13 @@ class GroupedData protected[sql](df: DataFrameImpl, groupingExprs: Seq[Expressio
       // No columns specified. Use all numeric columns.
       df.numericColumns
     } else {
-      // Make sure all specified columns are numeric
+      // Make sure all specified columns are numeric.
       colNames.map { colName =>
         val namedExpr = df.resolve(colName)
         if (!namedExpr.dataType.isInstanceOf[NumericType]) {
           throw new AnalysisException(
             s""""$colName" is not a numeric column. """ +
-              "Aggregation function can only be performed on a numeric column.")
+              "Aggregation function can only be applied on a numeric column.")
         }
         namedExpr
       }
@@ -64,7 +63,7 @@ class GroupedData protected[sql](df: DataFrameImpl, groupingExprs: Seq[Expressio
       Alias(a, a.toString)()
     }
   }

   private[this] def strToExpr(expr: String): (Expression => Expression) = {
     expr.toLowerCase match {
       case "avg" | "average" | "mean" => Average
...
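A second hedged sketch (not part of the commit; same assumed sqlContext and hypothetical data) showing the GroupedData behavior these hunks touch: string aggregate names such as "avg" are mapped to expressions by strToExpr, and requesting a numeric aggregation over a non-numeric column is rejected at call time with an AnalysisException carrying the reworded message.

import org.apache.spark.sql.AnalysisException

// Hypothetical data: "dept" is a string column, "salary" is numeric.
val df = sqlContext.createDataFrame(Seq(("eng", 100), ("ops", 80))).toDF("dept", "salary")

// "avg" is resolved to Average via strToExpr; aggregating the numeric column works.
df.groupBy("dept").agg(Map("salary" -> "avg")).show()

// Asking for a numeric aggregation of a string column fails analysis immediately.
try {
  df.groupBy("dept").mean("dept")
} catch {
  case e: AnalysisException =>
    // e.g. "dept" is not a numeric column. Aggregation function can only be applied on a numeric column.
    println(e.getMessage)
}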