diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/BoundAttribute.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/BoundAttribute.scala
index 397abc7391ec65efe3022c8caed8fd3e2dd1818a..dda822d05485beda62681e36ceadd62219f78d3d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/BoundAttribute.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/BoundAttribute.scala
@@ -29,7 +29,7 @@ import org.apache.spark.sql.types._
  * the layout of intermediate tuples, BindReferences should be run after all such transformations.
  */
 case class BoundReference(ordinal: Int, dataType: DataType, nullable: Boolean)
-  extends LeafExpression with NamedExpression {
+  extends LeafExpression {
 
   override def toString: String = s"input[$ordinal, ${dataType.simpleString}]"
 
@@ -58,16 +58,6 @@ case class BoundReference(ordinal: Int, dataType: DataType, nullable: Boolean)
     }
   }
 
-  override def name: String = s"i[$ordinal]"
-
-  override def toAttribute: Attribute = throw new UnsupportedOperationException
-
-  override def qualifiers: Seq[String] = throw new UnsupportedOperationException
-
-  override def exprId: ExprId = throw new UnsupportedOperationException
-
-  override def newInstance(): NamedExpression = this
-
   override def genCode(ctx: CodegenContext, ev: ExprCode): String = {
     val javaType = ctx.javaType(dataType)
     val value = ctx.getValue(ctx.INPUT_ROW, dataType, ordinal.toString)