Commit a9a1955

fix

1 parent 57171fe

1 file changed (+15, -2)

sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala

Lines changed: 15 additions & 2 deletions
@@ -29,14 +29,14 @@ import org.apache.spark.scheduler.{SparkListener, SparkListenerJobStart}
 import org.apache.spark.sql.catalyst.expressions.GenericRow
 import org.apache.spark.sql.catalyst.expressions.aggregate.{Complete, Partial}
 import org.apache.spark.sql.catalyst.optimizer.{ConvertToLocalRelation, NestedColumnAliasingSuite}
-import org.apache.spark.sql.catalyst.plans.logical.{Project, RepartitionByExpression}
+import org.apache.spark.sql.catalyst.plans.logical.{LocalLimit, Project, RepartitionByExpression}
 import org.apache.spark.sql.catalyst.util.StringUtils
 import org.apache.spark.sql.execution.UnionExec
 import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper
 import org.apache.spark.sql.execution.aggregate.{HashAggregateExec, ObjectHashAggregateExec, SortAggregateExec}
 import org.apache.spark.sql.execution.columnar.InMemoryTableScanExec
 import org.apache.spark.sql.execution.command.FunctionsCommand
-import org.apache.spark.sql.execution.datasources.SchemaColumnConvertNotSupportedException
+import org.apache.spark.sql.execution.datasources.{LogicalRelation, SchemaColumnConvertNotSupportedException}
 import org.apache.spark.sql.execution.datasources.v2.BatchScanExec
 import org.apache.spark.sql.execution.datasources.v2.orc.OrcScan
 import org.apache.spark.sql.execution.datasources.v2.parquet.ParquetScan
@@ -3943,6 +3943,19 @@ class SQLQuerySuite extends QueryTest with SharedSparkSession with AdaptiveSparkPlanHelper
       }
     }
   }
+
+  test("SPARK-26138 Pushdown limit through InnerLike when condition is empty") {
+    withTable("t1", "t2") {
+      spark.range(5).repartition(1).write.saveAsTable("t1")
+      spark.range(5).repartition(1).write.saveAsTable("t2")
+      val df = spark.sql("SELECT * FROM t1 CROSS JOIN t2 LIMIT 3")
+      val pushedLocalLimits = df.queryExecution.optimizedPlan.collect {
+        case l @ LocalLimit(_, _: LogicalRelation) => l
+      }
+      assert(pushedLocalLimits.length === 2)
+      checkAnswer(df, Row(0, 0) :: Row(0, 1) :: Row(0, 2) :: Nil)
+    }
+  }
 }

 case class Foo(bar: Option[String])
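
For context, a minimal sketch of the plan shape this test asserts (a hypothetical spark-shell session; the abbreviated plan rendering below is illustrative, not verbatim output): because the CROSS JOIN carries no join condition, the optimizer can push the limit below the join, leaving a LocalLimit directly above each relation, which the test's collect matches exactly twice.

// Hypothetical spark-shell session; plan output abbreviated by hand.
spark.range(5).repartition(1).write.saveAsTable("t1")
spark.range(5).repartition(1).write.saveAsTable("t2")
val df = spark.sql("SELECT * FROM t1 CROSS JOIN t2 LIMIT 3")
println(df.queryExecution.optimizedPlan)
// GlobalLimit 3
// +- LocalLimit 3
//    +- Join Cross
//       :- LocalLimit 3   <- pushed past the join (matched by the test)
//       :  +- Relation ... t1
//       +- LocalLimit 3   <- pushed past the join (matched by the test)
//          +- Relation ... t2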
