diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
index c70b5af4aa44877556c482ee25702b8dc0f6a119..0e128d8bdcd9699d24493d07dd2102cb4afa5b57 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
@@ -43,7 +43,7 @@ import org.apache.spark.sql.types._
  *
  * See [[Substring]] for an example.
  */
-abstract class Expression extends TreeNode[Expression] with Product {
+abstract class Expression extends TreeNode[Expression] {
 
   /**
    * Returns true when an expression is a candidate for static evaluation before the query is
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/math.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/math.scala
index 9101f11052218b88456a8c9b7cacdcdfcf6af59c..eb5c065a34123370f2067843d44610110ef0059c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/math.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/math.scala
@@ -77,7 +77,7 @@ abstract class UnaryMathExpression(f: Double => Double, name: String)
 }
 
 abstract class UnaryLogExpression(f: Double => Double, name: String)
-  extends UnaryMathExpression(f, name) { self: Product =>
+  extends UnaryMathExpression(f, name) {
 
   // values less than or equal to yAsymptote eval to null in Hive, instead of NaN or -Infinity
   protected val yAsymptote: Double = 0.0
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala
index b89e3382f06a9a9bf0d285e5170f754a08ad9569..d06a7a2add75469951be5f98320dff068df740dd 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala
@@ -22,7 +22,7 @@ import org.apache.spark.sql.catalyst.trees.TreeNode
 import org.apache.spark.sql.types.{ArrayType, DataType, StructField, StructType}
 
 abstract class QueryPlan[PlanType <: TreeNode[PlanType]] extends TreeNode[PlanType] {
-  self: PlanType with Product =>
+  self: PlanType =>
 
   def output: Seq[Attribute]
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
index dd6c5d43f5714ee89228cd97fc8e94b68c2b5c85..bedeaf06adf12b8e6fbd5c10b385911983bd2998 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
@@ -25,7 +25,7 @@ import org.apache.spark.sql.catalyst.plans.QueryPlan
 import org.apache.spark.sql.catalyst.trees.TreeNode
 
 
-abstract class LogicalPlan extends QueryPlan[LogicalPlan] with Logging with Product{
+abstract class LogicalPlan extends QueryPlan[LogicalPlan] with Logging {
 
   /**
    * Computes [[Statistics]] for this plan. The default implementation assumes the output
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
index 0f95ca688a7a8939d919228baadad9f5573209eb..122e9fc5ed77f0abe03062392cc62fb31f33e9be 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
@@ -54,8 +54,8 @@ object CurrentOrigin {
   }
 }
 
-abstract class TreeNode[BaseType <: TreeNode[BaseType]] {
-  self: BaseType with Product =>
+abstract class TreeNode[BaseType <: TreeNode[BaseType]] extends Product {
+  self: BaseType =>
 
   val origin: Origin = CurrentOrigin.get
 
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala
index f363e9947d5f61dc09a8328e76ed5b28eb268116..b0d56b7bf0b863f6cb76394903bd7d269c6d9e9d 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala
@@ -39,7 +39,7 @@ object SparkPlan {
  * :: DeveloperApi ::
  */
 @DeveloperApi
-abstract class SparkPlan extends QueryPlan[SparkPlan] with Logging with Product with Serializable {
+abstract class SparkPlan extends QueryPlan[SparkPlan] with Logging with Serializable {
 
   /**
    * A handle to the SQL Context that was used to create this plan. Since many operators need
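For context: dropping the explicit `with Product` mix-ins and `self: ... with Product =>` self-types is safe once `TreeNode` itself extends `Product`, because concrete Catalyst operators are case classes (or otherwise implement `Product`), and Scala case classes implement `Product` automatically; the constraint stays satisfied everywhere while being declared in one place. A minimal sketch of the pattern, using hypothetical `Node` and `Add` classes rather than Spark's own:

```scala
// Hypothetical stand-ins for TreeNode and a concrete operator; not Spark code.
// Case classes get Product for free, so only the base class needs to declare it.
abstract class Node[BaseType <: Node[BaseType]] extends Product {
  self: BaseType =>

  // The base class can enumerate constructor arguments reflectively via Product,
  // the same capability TreeNode relies on for copying and printing trees.
  def argString: String =
    productIterator.mkString(getClass.getSimpleName + "(", ", ", ")")
}

// No `with Product` and no `self: Product =>` needed on the subclass.
case class Add(left: Int, right: Int) extends Node[Add]

object ProductSketch extends App {
  println(Add(1, 2).argString)  // prints: Add(1, 2)
}
```

This mirrors the intent of the change above: `Expression`, `QueryPlan`, `LogicalPlan`, and `SparkPlan` no longer repeat the requirement, since they inherit it from `TreeNode`.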