sql/core/src/main/scala/org/apache/spark/sql/execution/columnar
1 file changed: +6, -10 lines

@@ -46,8 +46,7 @@ case class InMemoryTableScanExec(
   override val supportCodegen: Boolean = {
     // In the initial implementation, for ease of review
     // support only primitive data types and # of fields is less than wholeStageMaxNumFields
-    val schema = StructType.fromAttributes(relation.output)
-    schema.fields.find(f => f.dataType match {
+    relation.schema.fields.find(f => f.dataType match {
       case BooleanType | ByteType | ShortType | IntegerType | LongType |
         FloatType | DoubleType => false
       case _ => true
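The hunk above reuses relation.schema instead of rebuilding the StructType from relation.output. As a rough, standalone illustration of the primitive-only check (not the actual InMemoryTableScanExec code: the object and method names below are made up, and the trailing .isEmpty is an assumption about how the find result is consumed, since the hunk is cut off before that point):

// Hypothetical sketch of a "codegen-supported schema" check, assuming Spark's
// sql types are on the classpath.
import org.apache.spark.sql.types._

object SupportsCodegenCheckSketch {
  // find() returns Some(field) for the first non-primitive field,
  // so an empty result means every column is a supported primitive type.
  def allFieldsPrimitive(schema: StructType): Boolean = {
    schema.fields.find(f => f.dataType match {
      case BooleanType | ByteType | ShortType | IntegerType | LongType |
           FloatType | DoubleType => false
      case _ => true
    }).isEmpty
  }

  def main(args: Array[String]): Unit = {
    val ok = StructType(Seq(StructField("i", IntegerType), StructField("d", DoubleType)))
    val notOk = StructType(Seq(StructField("s", StringType)))
    println(allFieldsPrimitive(ok))    // true
    println(allFieldsPrimitive(notOk)) // false
  }
}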
@@ -78,14 +77,11 @@ case class InMemoryTableScanExec(
   }

   override def inputRDDs(): Seq[RDD[InternalRow]] = {
-    if (supportCodegen) {
-      val buffers = relation.cachedColumnBuffers
-      // HACK ALERT: This is actually an RDD[ColumnarBatch].
-      // We're taking advantage of Scala's type erasure here to pass these batches along.
-      Seq(buffers.map(createAndDecompressColumn(_)).asInstanceOf[RDD[InternalRow]])
-    } else {
-      Seq()
-    }
+    assert(supportCodegen)
+    val buffers = relation.cachedColumnBuffers
+    // HACK ALERT: This is actually an RDD[ColumnarBatch].
+    // We're taking advantage of Scala's type erasure here to pass these batches along.
+    Seq(buffers.map(createAndDecompressColumn(_)).asInstanceOf[RDD[InternalRow]])
   }

   override def output: Seq[Attribute] = attributes
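The HACK ALERT comment relies on the fact that Scala's generic type parameters are erased at runtime, so an RDD[ColumnarBatch] can be handed to code that expects an RDD[InternalRow] and cast back by the consumer; the if/else is replaced by assert(supportCodegen), presumably because inputRDDs is only invoked when whole-stage codegen is actually used. A minimal, Spark-free sketch of the same erasure trick (Batch and Row are hypothetical stand-ins for ColumnarBatch and InternalRow, and a plain List stands in for the RDD):

// Hypothetical stand-ins for ColumnarBatch and InternalRow (sketch only).
final case class Row(values: Seq[Any])
final case class Batch(rows: Seq[Row])

object ErasureHackSketch {
  def main(args: Array[String]): Unit = {
    val batches: List[Batch] = List(Batch(Seq(Row(Seq(1, 2)), Row(Seq(3, 4)))))

    // Erasure: at runtime List[Batch] and List[Row] are the same class,
    // so this cast is a no-op and does not throw.
    val disguised: List[Row] = batches.asInstanceOf[List[Row]]

    // A consumer that knows the secret casts back before using the elements;
    // treating an element directly as a Row would throw ClassCastException.
    val recovered: List[Batch] = disguised.asInstanceOf[List[Batch]]
    println(recovered.head.rows.length)  // prints 2
  }
}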