Skip to content

Commit a7e025c

Browse files
author
Davies Liu
committed
move InternalRow into catalyst
1 parent 30db8ba commit a7e025c

File tree

56 files changed

+302
-265
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

56 files changed

+302
-265
lines changed

sql/catalyst/src/main/java/org/apache/spark/sql/BaseRow.java

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@
2525
import scala.collection.Seq;
2626
import scala.collection.mutable.ArraySeq;
2727

28+
import org.apache.spark.sql.catalyst.InternalRow;
2829
import org.apache.spark.sql.catalyst.expressions.GenericRow;
2930
import org.apache.spark.sql.types.StructType;
3031

sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeFixedWidthAggregationMap.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@
2020
import java.util.Arrays;
2121
import java.util.Iterator;
2222

23-
import org.apache.spark.sql.InternalRow;
23+
import org.apache.spark.sql.catalyst.InternalRow;
2424
import org.apache.spark.sql.types.StructField;
2525
import org.apache.spark.sql.types.StructType;
2626
import org.apache.spark.unsafe.PlatformDependent;

sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRow.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@
2626
import scala.collection.Seq;
2727
import scala.collection.mutable.ArraySeq;
2828

29-
import org.apache.spark.sql.InternalRow;
29+
import org.apache.spark.sql.catalyst.InternalRow;
3030
import org.apache.spark.sql.BaseMutableRow;
3131
import org.apache.spark.sql.types.DataType;
3232
import org.apache.spark.sql.types.StructType;

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ import scala.collection.mutable.HashMap
2727

2828
import org.apache.spark.sql.catalyst.expressions._
2929
import org.apache.spark.sql.catalyst.util.DateUtils
30-
import org.apache.spark.sql.{InternalRow, Row}
30+
import org.apache.spark.sql.Row
3131
import org.apache.spark.sql.types._
3232
import org.apache.spark.unsafe.types.UTF8String
3333

@@ -284,7 +284,8 @@ object CatalystTypeConverters {
284284
override def toScala(catalystValue: Any): Timestamp =
285285
if (catalystValue == null) null
286286
else DateUtils.toJavaTimestamp(catalystValue.asInstanceOf[Long])
287-
override def toScalaImpl(row: Row, column: Int): Timestamp = toScala(row.getLong(column))
287+
override def toScalaImpl(row: InternalRow, column: Int): Timestamp =
288+
toScala(row.getLong(column))
288289
}
289290

290291
private object BigDecimalConverter extends CatalystTypeConverter[Any, JavaBigDecimal, Decimal] {

sql/catalyst/src/main/scala/org/apache/spark/sql/InternalRow.scala renamed to sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/InternalRow.scala

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
1-
package org.apache.spark.sql
1+
package org.apache.spark.sql.catalyst
22

3+
import org.apache.spark.sql.Row
34
import org.apache.spark.sql.catalyst.expressions.GenericRow
45

56
/**

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala

Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@
1717

1818
package org.apache.spark.sql.catalyst.analysis
1919

20+
import org.apache.spark.sql.catalyst
2021
import org.apache.spark.sql.catalyst.{errors, trees}
2122
import org.apache.spark.sql.catalyst.errors.TreeNodeException
2223
import org.apache.spark.sql.catalyst.expressions._
@@ -67,7 +68,7 @@ case class UnresolvedAttribute(nameParts: Seq[String])
6768
override def withName(newName: String): UnresolvedAttribute = UnresolvedAttribute.quoted(newName)
6869

6970
// Unresolved attributes are transient at compile time and don't get evaluated during execution.
70-
override def eval(input: InternalRow = null): Any =
71+
override def eval(input: catalyst.InternalRow = null): Any =
7172
throw new TreeNodeException(this, s"No function to evaluate expression. type: ${this.nodeName}")
7273

7374
override def toString: String = s"'$name"
@@ -85,7 +86,7 @@ case class UnresolvedFunction(name: String, children: Seq[Expression]) extends E
8586
override lazy val resolved = false
8687

8788
// Unresolved functions are transient at compile time and don't get evaluated during execution.
88-
override def eval(input: InternalRow = null): Any =
89+
override def eval(input: catalyst.InternalRow = null): Any =
8990
throw new TreeNodeException(this, s"No function to evaluate expression. type: ${this.nodeName}")
9091

9192
override def toString: String = s"'$name(${children.mkString(",")})"
@@ -107,7 +108,7 @@ trait Star extends NamedExpression with trees.LeafNode[Expression] {
107108
override lazy val resolved = false
108109

109110
// Star gets expanded at runtime so we never evaluate a Star.
110-
override def eval(input: InternalRow = null): Any =
111+
override def eval(input: catalyst.InternalRow = null): Any =
111112
throw new TreeNodeException(this, s"No function to evaluate expression. type: ${this.nodeName}")
112113

113114
def expand(input: Seq[Attribute], resolver: Resolver): Seq[NamedExpression]
@@ -166,7 +167,7 @@ case class MultiAlias(child: Expression, names: Seq[String])
166167

167168
override lazy val resolved = false
168169

169-
override def eval(input: InternalRow = null): Any =
170+
override def eval(input: catalyst.InternalRow = null): Any =
170171
throw new TreeNodeException(this, s"No function to evaluate expression. type: ${this.nodeName}")
171172

172173
override def toString: String = s"$child AS $names"
@@ -200,7 +201,7 @@ case class UnresolvedExtractValue(child: Expression, extraction: Expression)
200201
override def nullable: Boolean = throw new UnresolvedException(this, "nullable")
201202
override lazy val resolved = false
202203

203-
override def eval(input: InternalRow = null): Any =
204+
override def eval(input: catalyst.InternalRow = null): Any =
204205
throw new TreeNodeException(this, s"No function to evaluate expression. type: ${this.nodeName}")
205206

206207
override def toString: String = s"$child[$extraction]"

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/BoundAttribute.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ import org.apache.spark.Logging
2121
import org.apache.spark.sql.catalyst.errors.attachTree
2222
import org.apache.spark.sql.catalyst.expressions.codegen.{GeneratedExpressionCode, CodeGenContext}
2323
import org.apache.spark.sql.types._
24-
import org.apache.spark.sql.catalyst.trees
24+
import org.apache.spark.sql.catalyst.{InternalRow, trees}
2525

2626
/**
2727
* A bound reference points to a specific slot in the input tuple, allowing the actual value

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,7 @@ import java.sql.{Date, Timestamp}
2121
import java.text.{DateFormat, SimpleDateFormat}
2222

2323
import org.apache.spark.Logging
24+
import org.apache.spark.sql.catalyst
2425
import org.apache.spark.sql.catalyst.expressions.codegen.{CodeGenContext, GeneratedExpressionCode}
2526
import org.apache.spark.sql.catalyst.util.DateUtils
2627
import org.apache.spark.sql.types._
@@ -393,7 +394,7 @@ case class Cast(child: Expression, dataType: DataType) extends UnaryExpression w
393394
}
394395
// TODO: Could be faster?
395396
val newRow = new GenericMutableRow(from.fields.size)
396-
buildCast[InternalRow](_, row => {
397+
buildCast[catalyst.InternalRow](_, row => {
397398
var i = 0
398399
while (i < row.length) {
399400
val v = row(i)
@@ -425,7 +426,7 @@ case class Cast(child: Expression, dataType: DataType) extends UnaryExpression w
425426

426427
private[this] lazy val cast: Any => Any = cast(child.dataType, dataType)
427428

428-
override def eval(input: InternalRow): Any = {
429+
override def eval(input: catalyst.InternalRow): Any = {
429430
val evaluated = child.eval(input)
430431
if (evaluated == null) null else cast(evaluated)
431432
}

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ExtractValue.scala

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ package org.apache.spark.sql.catalyst.expressions
1919

2020
import scala.collection.Map
2121

22-
import org.apache.spark.sql.AnalysisException
22+
import org.apache.spark.sql.{catalyst, AnalysisException}
2323
import org.apache.spark.sql.catalyst.analysis._
2424
import org.apache.spark.sql.types._
2525

@@ -105,8 +105,8 @@ case class GetStructField(child: Expression, field: StructField, ordinal: Int)
105105
override def foldable: Boolean = child.foldable
106106
override def toString: String = s"$child.${field.name}"
107107

108-
override def eval(input: InternalRow): Any = {
109-
val baseValue = child.eval(input).asInstanceOf[InternalRow]
108+
override def eval(input: catalyst.InternalRow): Any = {
109+
val baseValue = child.eval(input).asInstanceOf[catalyst.InternalRow]
110110
if (baseValue == null) null else baseValue(ordinal)
111111
}
112112
}
@@ -125,8 +125,8 @@ case class GetArrayStructFields(
125125
override def foldable: Boolean = child.foldable
126126
override def toString: String = s"$child.${field.name}"
127127

128-
override def eval(input: InternalRow): Any = {
129-
val baseValue = child.eval(input).asInstanceOf[Seq[InternalRow]]
128+
override def eval(input: catalyst.InternalRow): Any = {
129+
val baseValue = child.eval(input).asInstanceOf[Seq[catalyst.InternalRow]]
130130
if (baseValue == null) null else {
131131
baseValue.map { row =>
132132
if (row == null) null else row(ordinal)
@@ -146,7 +146,7 @@ abstract class ExtractValueWithOrdinal extends ExtractValue {
146146
override def toString: String = s"$child[$ordinal]"
147147
override def children: Seq[Expression] = child :: ordinal :: Nil
148148

149-
override def eval(input: InternalRow): Any = {
149+
override def eval(input: catalyst.InternalRow): Any = {
150150
val value = child.eval(input)
151151
if (value == null) {
152152
null

0 commit comments

Comments
 (0)