Commit f2abd13

Author: Davies Liu (committed)

fix scalastyle
1 parent a7e025c commit f2abd13

File tree

14 files changed: 111 additions, 47 deletions
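The commit is a pure style cleanup. Most hunks below wrap declarations that overflow Spark's scalastyle line-length limit (100 characters); several also drop the now-redundant `catalyst.` qualifier from `InternalRow` (the `expressions` package object aliases it, as one hunk below shows), and one file gains the ASF license header that Spark's checks require. A minimal Scala sketch of the recurring wrapping fix, with hypothetical names:

object WrapExample {
  final case class Cell(var value: Any)

  // Before the fix this signature sat on one line and exceeded 100 columns;
  // scalastyle accepts the wrapped form: one parameter per line, indented
  // four spaces past the `def`, with `): Int = {` closing the list.
  def copyCell(
      source: Cell,
      target: Cell,
      column: Int,
      appendCursor: Int): Int = {
    target.value = source.value
    0 // mirrors the primitive writers below, which write no variable-length bytes
  }
}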

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/InternalRow.scala

Lines changed: 18 additions & 1 deletion
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.spark.sql.catalyst
 
 import org.apache.spark.sql.Row

@@ -37,4 +54,4 @@ object InternalRow {
 
   /** Returns an empty row. */
   val empty = apply()
-}
+}
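Besides the ASF license header (required on every Spark source file), this hunk only touches the closing brace, apparently adding a missing newline at end of file. For orientation, a hedged sketch of how the companion factory implied by `val empty = apply()` is typically used; the varargs `apply` signature is an assumption, not shown in the hunk:

import org.apache.spark.sql.catalyst.InternalRow

val row = InternalRow(1, "a")  // hypothetical varargs call, by analogy with Row(...)
val none = InternalRow.empty   // the shared empty row defined above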

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/UnsafeRowConverter.scala

Lines changed: 51 additions & 16 deletions
@@ -17,7 +17,6 @@
 
 package org.apache.spark.sql.catalyst.expressions
 
-import org.apache.spark.sql.catalyst
 import org.apache.spark.sql.types._
 import org.apache.spark.unsafe.PlatformDependent
 import org.apache.spark.unsafe.array.ByteArrayMethods

@@ -49,7 +48,7 @@ class UnsafeRowConverter(fieldTypes: Array[DataType]) {
   /**
    * Compute the amount of space, in bytes, required to encode the given row.
    */
-  def getSizeRequirement(row: catalyst.InternalRow): Int = {
+  def getSizeRequirement(row: InternalRow): Int = {
     var fieldNumber = 0
     var variableLengthFieldSize: Int = 0
     while (fieldNumber < writers.length) {

@@ -69,7 +68,7 @@
    * @param baseOffset the base offset of the destination address
    * @return the number of bytes written. This should be equal to `getSizeRequirement(row)`.
    */
-  def writeRow(row: catalyst.InternalRow, baseObject: Object, baseOffset: Long): Long = {
+  def writeRow(row: InternalRow, baseObject: Object, baseOffset: Long): Long = {
     unsafeRow.pointTo(baseObject, baseOffset, writers.length, null)
     var fieldNumber = 0
     var appendCursor: Int = fixedLengthSize

@@ -100,12 +99,12 @@ private abstract class UnsafeColumnWriter {
    *                     used for calculating where variable-length data should be written
    * @return the number of variable-length bytes written
    */
-  def write(source: catalyst.InternalRow, target: UnsafeRow, column: Int, appendCursor: Int): Int
+  def write(source: InternalRow, target: UnsafeRow, column: Int, appendCursor: Int): Int
 
   /**
    * Return the number of bytes that are needed to write this variable-length value.
    */
-  def getSize(source: catalyst.InternalRow, column: Int): Int
+  def getSize(source: InternalRow, column: Int): Int
 }
 
 private object UnsafeColumnWriter {

@@ -141,72 +140,108 @@ private object StringUnsafeColumnWriter extends StringUnsafeColumnWriter
 
 private abstract class PrimitiveUnsafeColumnWriter extends UnsafeColumnWriter {
   // Primitives don't write to the variable-length region:
-  def getSize(sourceRow: catalyst.InternalRow, column: Int): Int = 0
+  def getSize(sourceRow: InternalRow, column: Int): Int = 0
 }
 
 private class NullUnsafeColumnWriter private() extends PrimitiveUnsafeColumnWriter {
-  override def write(source: catalyst.InternalRow, target: UnsafeRow, column: Int, appendCursor: Int): Int = {
+  override def write(
+      source: InternalRow,
+      target: UnsafeRow,
+      column: Int,
+      appendCursor: Int): Int = {
     target.setNullAt(column)
     0
   }
 }
 
 private class BooleanUnsafeColumnWriter private() extends PrimitiveUnsafeColumnWriter {
-  override def write(source: catalyst.InternalRow, target: UnsafeRow, column: Int, appendCursor: Int): Int = {
+  override def write(
+      source: InternalRow,
+      target: UnsafeRow,
+      column: Int,
+      appendCursor: Int): Int = {
     target.setBoolean(column, source.getBoolean(column))
     0
   }
 }
 
 private class ByteUnsafeColumnWriter private() extends PrimitiveUnsafeColumnWriter {
-  override def write(source: catalyst.InternalRow, target: UnsafeRow, column: Int, appendCursor: Int): Int = {
+  override def write(
+      source: InternalRow,
+      target: UnsafeRow,
+      column: Int,
+      appendCursor: Int): Int = {
     target.setByte(column, source.getByte(column))
     0
   }
 }
 
 private class ShortUnsafeColumnWriter private() extends PrimitiveUnsafeColumnWriter {
-  override def write(source: catalyst.InternalRow, target: UnsafeRow, column: Int, appendCursor: Int): Int = {
+  override def write(
+      source: InternalRow,
+      target: UnsafeRow,
+      column: Int,
+      appendCursor: Int): Int = {
     target.setShort(column, source.getShort(column))
     0
   }
 }
 
 private class IntUnsafeColumnWriter private() extends PrimitiveUnsafeColumnWriter {
-  override def write(source: catalyst.InternalRow, target: UnsafeRow, column: Int, appendCursor: Int): Int = {
+  override def write(
+      source: InternalRow,
+      target: UnsafeRow,
+      column: Int,
+      appendCursor: Int): Int = {
     target.setInt(column, source.getInt(column))
     0
   }
 }
 
 private class LongUnsafeColumnWriter private() extends PrimitiveUnsafeColumnWriter {
-  override def write(source: catalyst.InternalRow, target: UnsafeRow, column: Int, appendCursor: Int): Int = {
+  override def write(
+      source: InternalRow,
+      target: UnsafeRow,
+      column: Int,
+      appendCursor: Int): Int = {
     target.setLong(column, source.getLong(column))
     0
   }
 }
 
 private class FloatUnsafeColumnWriter private() extends PrimitiveUnsafeColumnWriter {
-  override def write(source: catalyst.InternalRow, target: UnsafeRow, column: Int, appendCursor: Int): Int = {
+  override def write(
+      source: InternalRow,
+      target: UnsafeRow,
+      column: Int,
+      appendCursor: Int): Int = {
     target.setFloat(column, source.getFloat(column))
     0
   }
 }
 
 private class DoubleUnsafeColumnWriter private() extends PrimitiveUnsafeColumnWriter {
-  override def write(source: catalyst.InternalRow, target: UnsafeRow, column: Int, appendCursor: Int): Int = {
+  override def write(
+      source: InternalRow,
+      target: UnsafeRow,
+      column: Int,
+      appendCursor: Int): Int = {
     target.setDouble(column, source.getDouble(column))
     0
   }
 }
 
 private class StringUnsafeColumnWriter private() extends UnsafeColumnWriter {
-  def getSize(source: catalyst.InternalRow, column: Int): Int = {
+  def getSize(source: InternalRow, column: Int): Int = {
     val numBytes = source.get(column).asInstanceOf[UTF8String].getBytes.length
     8 + ByteArrayMethods.roundNumberOfBytesToNearestWord(numBytes)
   }
 
-  override def write(source: catalyst.InternalRow, target: UnsafeRow, column: Int, appendCursor: Int): Int = {
+  override def write(
+      source: InternalRow,
+      target: UnsafeRow,
+      column: Int,
+      appendCursor: Int): Int = {
     val value = source.get(column).asInstanceOf[UTF8String]
     val baseObject = target.getBaseObject
     val baseOffset = target.getBaseOffset
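The signature changes above are cosmetic; the converter's two-phase contract is unchanged: size the row first, then write it into a caller-supplied buffer. A minimal usage sketch, patterned on how Spark's tests drive this class; the two-field schema and long-array buffer are illustrative, and the varargs `InternalRow(...)` factory is an assumption:

import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.UnsafeRowConverter
import org.apache.spark.sql.types.{IntegerType, LongType}
import org.apache.spark.unsafe.PlatformDependent

val converter = new UnsafeRowConverter(Array(IntegerType, LongType))
val row = InternalRow(42, 9L)                 // assumed varargs factory
val size = converter.getSizeRequirement(row)  // phase 1: compute the encoding size
val buffer = new Array[Long](size / 8)        // row data is 8-byte aligned
val written = converter.writeRow(row, buffer, PlatformDependent.LONG_ARRAY_OFFSET)
assert(written == size)                       // contract stated in the scaladoc above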

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateOrdering.scala

Lines changed: 2 additions & 1 deletion
@@ -36,7 +36,8 @@ class BaseOrdering extends Ordering[catalyst.InternalRow] {
  * Generates bytecode for an [[Ordering]] of [[Row Rows]] for a given set of
  * [[Expression Expressions]].
  */
-object GenerateOrdering extends CodeGenerator[Seq[SortOrder], Ordering[catalyst.InternalRow]] with Logging {
+object GenerateOrdering
+  extends CodeGenerator[Seq[SortOrder], Ordering[catalyst.InternalRow]] with Logging {
   import scala.reflect.runtime.universe._
 
   protected def canonicalize(in: Seq[SortOrder]): Seq[SortOrder] =
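`GenerateOrdering` is one of several `CodeGenerator` subclasses reformatted in this commit; each compiles catalyst expressions into specialized bytecode. A hedged sketch of obtaining and using a generated ordering; the `generate` entry point is inherited from `CodeGenerator`, and the expressions and row factory are illustrative assumptions:

import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.codegen.GenerateOrdering
import org.apache.spark.sql.types.IntegerType

// Order rows by their first column, ascending.
val order = SortOrder(BoundReference(0, IntegerType, nullable = true), Ascending)
val ordering = GenerateOrdering.generate(order :: Nil)
assert(ordering.compare(InternalRow(1), InternalRow(2)) < 0)  // assumed factory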

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GeneratePredicate.scala

Lines changed: 1 addition & 1 deletion
@@ -28,7 +28,7 @@ abstract class Predicate {
 }
 
 /**
- * Generates bytecode that evaluates a boolean [[Expression]] on a given input [[catalyst.InternalRow]].
+ * Generates bytecode that evaluates a boolean [[Expression]] on a given input [[InternalRow]].
  */
 object GeneratePredicate extends CodeGenerator[Expression, (catalyst.InternalRow) => Boolean] {
 
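In the same vein, a hedged sketch of `GeneratePredicate`, which compiles a boolean expression into an `InternalRow => Boolean` function, as the object's declared output type shows. The expression below is illustrative:

import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.codegen.GeneratePredicate
import org.apache.spark.sql.types.IntegerType

// Compile "column 0 = 1" into a predicate function.
val predicate = GeneratePredicate.generate(
  EqualTo(BoundReference(0, IntegerType, nullable = true), Literal(1)))
assert(predicate(InternalRow(1)))   // assumed varargs factory
assert(!predicate(InternalRow(2)))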

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateProjection.scala

Lines changed: 5 additions & 4 deletions
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.catalyst.expressions.codegen
 
-import org.apache.spark.sql.{catalyst, BaseMutableRow}
+import org.apache.spark.sql.BaseMutableRow
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.types._

@@ -27,9 +27,10 @@ import org.apache.spark.sql.types._
 abstract class BaseProject extends Projection {}
 
 /**
- * Generates bytecode that produces a new [[catalyst.InternalRow]] object based on a fixed set of input
- * [[Expression Expressions]] and a given input [[catalyst.InternalRow]]. The returned [[catalyst.InternalRow]] object is custom
- * generated based on the output types of the [[Expression]] to avoid boxing of primitive values.
+ * Generates bytecode that produces a new [[InternalRow]] object based on a fixed set of input
+ * [[Expression Expressions]] and a given input [[InternalRow]]. The returned [[InternalRow]]
+ * object is custom generated based on the output types of the [[Expression]] to avoid boxing of
+ * primitive values.
  */
 object GenerateProjection extends CodeGenerator[Seq[Expression], Projection] {
   import scala.reflect.runtime.universe._
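The reflowed comment above states the point of this generator: the emitted row class stores each output column in an unboxed field. A hedged usage sketch; the expressions are illustrative and the varargs row factory is assumed:

import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.codegen.GenerateProjection
import org.apache.spark.sql.types.IntegerType

// Project (col0, col0 + 1) through a generated, boxing-free row class.
val col0 = BoundReference(0, IntegerType, nullable = false)
val projection = GenerateProjection.generate(Seq(col0, Add(col0, Literal(1))))
val out = projection(InternalRow(41))
assert(out.getInt(0) == 41 && out.getInt(1) == 42)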

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/package.scala

Lines changed: 9 additions & 9 deletions
@@ -56,22 +56,22 @@ package object expressions {
   val InternalRow = catalyst.InternalRow
 
   /**
-   * Converts a [[InternalRow]] to another Row given a sequence of expression that define each column of the
-   * new row. If the schema of the input row is specified, then the given expression will be bound
-   * to that schema.
+   * Converts a [[InternalRow]] to another Row given a sequence of expression that define each
+   * column of the new row. If the schema of the input row is specified, then the given expression
+   * will be bound to that schema.
    */
   abstract class Projection extends (InternalRow => InternalRow)
 
   /**
-   * Converts a [[InternalRow]] to another Row given a sequence of expression that define each column of the
-   * new row. If the schema of the input row is specified, then the given expression will be bound
-   * to that schema.
+   * Converts a [[InternalRow]] to another Row given a sequence of expression that define each
+   * column of the new row. If the schema of the input row is specified, then the given expression
+   * will be bound to that schema.
    *
    * In contrast to a normal projection, a MutableProjection reuses the same underlying row object
    * each time an input row is added. This significantly reduces the cost of calculating the
-   * projection, but means that it is not safe to hold on to a reference to a [[InternalRow]] after `next()`
-   * has been called on the [[Iterator]] that produced it. Instead, the user must call `Row.copy()`
-   * and hold on to the returned [[InternalRow]] before calling `next()`.
+   * projection, but means that it is not safe to hold on to a reference to a [[InternalRow]] after
+   * `next()` has been called on the [[Iterator]] that produced it. Instead, the user must call
+   * `InternalRow.copy()` and hold on to the returned [[InternalRow]] before calling `next()`.
    */
   abstract class MutableProjection extends Projection {
     def currentValue: InternalRow
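Note the rewrap also corrects a stale name: after the `InternalRow` split, the copy the caller must make is `InternalRow.copy()`, not `Row.copy()`. A hedged illustration of the caveat the scaladoc describes; the helper and its inputs are hypothetical:

import org.apache.spark.sql.catalyst.expressions._

// A MutableProjection reuses one underlying row, so buffering its raw output
// would leave every element aliasing the same object; snapshot with copy().
def materialize(input: Iterator[InternalRow], projection: MutableProjection) =
  input.map(row => projection(row).copy()).toArray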

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/rows.scala

Lines changed: 2 additions & 2 deletions
@@ -21,8 +21,8 @@ import org.apache.spark.sql.types.{DataType, StructType, AtomicType}
 import org.apache.spark.unsafe.types.UTF8String
 
 /**
- * An extended interface to [[InternalRow]] that allows the values for each column to be updated. Setting
- * a value through a primitive function implicitly marks that column as not null.
+ * An extended interface to [[InternalRow]] that allows the values for each column to be updated.
+ * Setting a value through a primitive function implicitly marks that column as not null.
  */
 trait MutableRow extends InternalRow {
   def setNullAt(i: Int): Unit
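A brief hedged sketch of the null-tracking behavior the comment describes, using `GenericMutableRow` from this same file (its `Int`-arity constructor is assumed):

import org.apache.spark.sql.catalyst.expressions.GenericMutableRow

val row = new GenericMutableRow(2)  // two columns, initially null
row.setInt(0, 7)                    // primitive setter implicitly marks column 0 non-null
row.setNullAt(1)                    // explicit null
assert(!row.isNullAt(0) && row.isNullAt(1))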

sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala

Lines changed: 2 additions & 1 deletion
@@ -1231,7 +1231,8 @@ class DataFrame private[sql](
 
       // Pivot the data so each summary is one row
       row.grouped(outputCols.size).toSeq.zip(statistics).map {
-        case (aggregation, (statistic, _)) => catalyst.InternalRow(statistic :: aggregation.toList: _*)
+        case (aggregation, (statistic, _)) =>
+          catalyst.InternalRow(statistic :: aggregation.toList: _*)
       }
     } else {
       // If there are no output columns, just output a single column that contains the stats.
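This hunk sits inside `DataFrame.describe()`, which pivots per-statistic aggregates into one output row per statistic. For orientation, a typical call; `sc` is assumed to be an in-scope `SparkContext`, and the column names are illustrative:

import org.apache.spark.sql.SQLContext

val sqlContext = new SQLContext(sc)  // assumes an existing SparkContext `sc`
val df = sqlContext.createDataFrame(Seq((20, 150.0), (30, 160.0)))
  .toDF("age", "height")
df.describe("age", "height").show()  // one pivoted row per statistic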

sql/core/src/main/scala/org/apache/spark/sql/execution/LocalTableScan.scala

Lines changed: 3 additions & 1 deletion
@@ -26,7 +26,9 @@ import org.apache.spark.sql.catalyst.expressions.Attribute
 /**
  * Physical plan node for scanning data from a local collection.
  */
-private[sql] case class LocalTableScan(output: Seq[Attribute], rows: Seq[InternalRow]) extends LeafNode {
+private[sql] case class LocalTableScan(
+    output: Seq[Attribute],
+    rows: Seq[InternalRow]) extends LeafNode {
 
   private lazy val rdd = sqlContext.sparkContext.parallelize(rows)
 

sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashOuterJoin.scala

Lines changed: 10 additions & 4 deletions
@@ -74,13 +74,16 @@ case class HashOuterJoin(
   @transient private[this] lazy val leftNullRow = new GenericRow(left.output.length)
   @transient private[this] lazy val rightNullRow = new GenericRow(right.output.length)
   @transient private[this] lazy val boundCondition =
-    condition.map(newPredicate(_, left.output ++ right.output)).getOrElse((row: InternalRow) => true)
+    condition.map(
+      newPredicate(_, left.output ++ right.output)).getOrElse((row: InternalRow) => true)
 
   // TODO we need to rewrite all of the iterators with our own implementation instead of the Scala
   // iterator for performance purpose.
 
   private[this] def leftOuterIterator(
-      key: InternalRow, joinedRow: JoinedRow, rightIter: Iterable[InternalRow]): Iterator[InternalRow] = {
+      key: InternalRow,
+      joinedRow: JoinedRow,
+      rightIter: Iterable[InternalRow]): Iterator[InternalRow] = {
     val ret: Iterable[InternalRow] = {
       if (!key.anyNull) {
         val temp = rightIter.collect {

@@ -99,7 +102,9 @@ case class HashOuterJoin(
   }
 
   private[this] def rightOuterIterator(
-      key: InternalRow, leftIter: Iterable[InternalRow], joinedRow: JoinedRow): Iterator[InternalRow] = {
+      key: InternalRow,
+      leftIter: Iterable[InternalRow],
+      joinedRow: JoinedRow): Iterator[InternalRow] = {
 
     val ret: Iterable[InternalRow] = {
       if (!key.anyNull) {

@@ -167,7 +172,8 @@ case class HashOuterJoin(
   }
 
   private[this] def buildHashTable(
-      iter: Iterator[InternalRow], keyGenerator: Projection): JavaHashMap[InternalRow, CompactBuffer[InternalRow]] = {
+      iter: Iterator[InternalRow],
+      keyGenerator: Projection): JavaHashMap[InternalRow, CompactBuffer[InternalRow]] = {
     val hashTable = new JavaHashMap[InternalRow, CompactBuffer[InternalRow]]()
     while (iter.hasNext) {
       val currentRow = iter.next()
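The `buildHashTable` hunk is cut off at the start of its loop body. A hedged reconstruction of how such a grouping loop plausibly continues, for orientation only (not the verbatim remainder of the method); `iter`, `keyGenerator`, and `hashTable` are the names from the hunk:

while (iter.hasNext) {
  val currentRow = iter.next()
  val rowKey = keyGenerator(currentRow)
  var matchList = hashTable.get(rowKey)
  if (matchList == null) {
    matchList = new CompactBuffer[InternalRow]()
    hashTable.put(rowKey, matchList)
  }
  // copy() because upstream iterators may reuse the same row object
  // (see the MutableProjection note earlier in this commit).
  matchList += currentRow.copy()
}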
