Skip to content

Commit 2432d65

Browse files
committed
Support different floating-point Ordering for Scala 2.12 / 2.13
1 parent a60da23 commit 2432d65

File tree

5 files changed

+100
-7
lines changed

5 files changed

+100
-7
lines changed

core/pom.xml

Lines changed: 24 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -26,12 +26,15 @@
2626
</parent>
2727

2828
<artifactId>spark-core_2.12</artifactId>
29-
<properties>
30-
<sbt.project.name>core</sbt.project.name>
31-
</properties>
3229
<packaging>jar</packaging>
3330
<name>Spark Project Core</name>
3431
<url>http://spark.apache.org/</url>
32+
33+
<properties>
34+
<sbt.project.name>core</sbt.project.name>
35+
<extra.source.dir>src/main/scala-${scala.binary.version}</extra.source.dir>
36+
</properties>
37+
3538
<dependencies>
3639
<dependency>
3740
<groupId>com.thoughtworks.paranamer</groupId>
@@ -516,6 +519,24 @@
516519
</execution>
517520
</executions>
518521
</plugin>
522+
<plugin>
523+
<groupId>org.codehaus.mojo</groupId>
524+
<artifactId>build-helper-maven-plugin</artifactId>
525+
<executions>
526+
<execution>
527+
<id>add-sources</id>
528+
<phase>generate-sources</phase>
529+
<goals>
530+
<goal>add-source</goal>
531+
</goals>
532+
<configuration>
533+
<sources>
534+
<source>${extra.source.dir}</source>
535+
</sources>
536+
</configuration>
537+
</execution>
538+
</executions>
539+
</plugin>
519540
</plugins>
520541
</build>
521542

Lines changed: 33 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,33 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one or more
3+
* contributor license agreements. See the NOTICE file distributed with
4+
* this work for additional information regarding copyright ownership.
5+
* The ASF licenses this file to You under the Apache License, Version 2.0
6+
* (the "License"); you may not use this file except in compliance with
7+
* the License. You may obtain a copy of the License at
8+
*
9+
* http://www.apache.org/licenses/LICENSE-2.0
10+
*
11+
* Unless required by applicable law or agreed to in writing, software
12+
* distributed under the License is distributed on an "AS IS" BASIS,
13+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14+
* See the License for the specific language governing permissions and
15+
* limitations under the License.
16+
*/
17+
18+
package org.apache.spark.util
19+
20+
/**
21+
* This object only exists to bridge the difference between Scala 2.12 and Scala 2.13's
22+
* support for floating-point ordering. It is implemented separately for both as there
23+
* is no method that exists in both for comparison.
24+
*
25+
* It functions like Ordering.Double in Scala 2.12.
26+
*/
27+
private[spark] object OrderingUtil {
28+
29+
def compareDouble(x: Double, y: Double): Int = Ordering.Double.compare(x, y)
30+
31+
def compareFloat(x: Float, y: Float): Int = Ordering.Float.compare(x, y)
32+
33+
}
Lines changed: 34 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,34 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one or more
3+
* contributor license agreements. See the NOTICE file distributed with
4+
* this work for additional information regarding copyright ownership.
5+
* The ASF licenses this file to You under the Apache License, Version 2.0
6+
* (the "License"); you may not use this file except in compliance with
7+
* the License. You may obtain a copy of the License at
8+
*
9+
* http://www.apache.org/licenses/LICENSE-2.0
10+
*
11+
* Unless required by applicable law or agreed to in writing, software
12+
* distributed under the License is distributed on an "AS IS" BASIS,
13+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14+
* See the License for the specific language governing permissions and
15+
* limitations under the License.
16+
*/
17+
18+
package org.apache.spark.util
19+
20+
/**
21+
* This object only exists to bridge the difference between Scala 2.12 and Scala 2.13's
22+
* support for floating-point ordering. It is implemented separately for both as there
23+
* is no method that exists in both for comparison.
24+
*
25+
* It functions like Ordering.Double.TotalOrdering in Scala 2.13, which matches java.lang.Double
26+
* rather than Scala 2.12's Ordering.Double in its handling of NaN.
27+
*/
28+
private[spark] object OrderingUtil {
29+
30+
def compareDouble(x: Double, y: Double): Int = Ordering.Double.TotalOrdering.compare(x, y)
31+
32+
def compareFloat(x: Float, y: Float): Int = Ordering.Float.TotalOrdering.compare(x, y)
33+
34+
}

core/src/test/scala/org/apache/spark/util/collection/SorterSuite.scala

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,7 @@ import java.util.concurrent.TimeUnit
2323

2424
import org.apache.spark.SparkFunSuite
2525
import org.apache.spark.internal.Logging
26+
import org.apache.spark.util.OrderingUtil
2627
import org.apache.spark.util.Utils.timeIt
2728
import org.apache.spark.util.random.XORShiftRandom
2829

@@ -59,7 +60,7 @@ class SorterSuite extends SparkFunSuite with Logging {
5960

6061
Arrays.sort(keys)
6162
new Sorter(new KVArraySortDataFormat[Double, Number])
62-
.sort(keyValueArray, 0, keys.length, Ordering.Double)
63+
.sort(keyValueArray, 0, keys.length, OrderingUtil.compareDouble)
6364

6465
keys.zipWithIndex.foreach { case (k, i) =>
6566
assert(k === keyValueArray(2 * i))

sql/catalyst/src/main/scala/org/apache/spark/sql/types/numerics.scala

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ import scala.math.Numeric._
2121
import scala.math.Ordering
2222

2323
import org.apache.spark.sql.types.Decimal.DecimalIsConflicted
24-
24+
import org.apache.spark.util.OrderingUtil
2525

2626
object ByteExactNumeric extends ByteIsIntegral with Ordering.ByteOrdering {
2727
private def checkOverflow(res: Int, x: Byte, y: Byte, op: String): Unit = {
@@ -118,7 +118,7 @@ object LongExactNumeric extends LongIsIntegral with Ordering.LongOrdering {
118118
}
119119
}
120120

121-
object FloatExactNumeric extends FloatIsFractional with Ordering.FloatOrdering {
121+
object FloatExactNumeric extends FloatIsFractional {
122122
private def overflowException(x: Float, dataType: String) =
123123
throw new ArithmeticException(s"Casting $x to $dataType causes overflow")
124124

@@ -148,9 +148,11 @@ object FloatExactNumeric extends FloatIsFractional with Ordering.FloatOrdering {
148148
overflowException(x, "int")
149149
}
150150
}
151+
152+
override def compare(x: Float, y: Float): Int = OrderingUtil.compareFloat(x, y)
151153
}
152154

153-
object DoubleExactNumeric extends DoubleIsFractional with Ordering.DoubleOrdering {
155+
object DoubleExactNumeric extends DoubleIsFractional {
154156
private def overflowException(x: Double, dataType: String) =
155157
throw new ArithmeticException(s"Casting $x to $dataType causes overflow")
156158

@@ -174,6 +176,8 @@ object DoubleExactNumeric extends DoubleIsFractional with Ordering.DoubleOrderin
174176
overflowException(x, "long")
175177
}
176178
}
179+
180+
override def compare(x: Double, y: Double): Int = OrderingUtil.compareDouble(x, y)
177181
}
178182

179183
object DecimalExactNumeric extends DecimalIsConflicted {

0 commit comments

Comments
 (0)