
Commit 795ede6

Merge remote-tracking branch 'origin/master' into SPARK-21040-speculate-decommission-exec-tasks
2 parents: 1cae338 + b806fc4

File tree

83 files changed: +1191 additions, -499 deletions


sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkMetadataOperationUtils.scala renamed to common/tags/src/test/java/org/apache/spark/tags/ChromeUITest.java

Lines changed: 7 additions & 14 deletions
@@ -15,20 +15,13 @@
  * limitations under the License.
  */

-package org.apache.spark.sql.hive.thriftserver
+package org.apache.spark.tags;

-import org.apache.spark.sql.catalyst.catalog.CatalogTableType
-import org.apache.spark.sql.catalyst.catalog.CatalogTableType.{EXTERNAL, MANAGED, VIEW}
+import java.lang.annotation.*;

-/**
- * Utils for metadata operations.
- */
-private[hive] trait SparkMetadataOperationUtils {
+import org.scalatest.TagAnnotation;

-  def tableTypeString(tableType: CatalogTableType): String = tableType match {
-    case EXTERNAL | MANAGED => "TABLE"
-    case VIEW => "VIEW"
-    case t =>
-      throw new IllegalArgumentException(s"Unknown table type is found: $t")
-  }
-}
+@TagAnnotation
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.METHOD, ElementType.TYPE})
+public @interface ChromeUITest { }
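
ChromeUITest is picked up by ScalaTest through the class-level @TagAnnotation. For comparison, a per-test tag with the same effect (a hypothetical companion, not part of this commit) would be defined like this in Scala:

import org.scalatest.Tag

// Hypothetical per-test counterpart to the class-level annotation above.
// The string must be the annotation's fully qualified name so that both
// forms are matched by the same include/exclude tag filters.
object ChromeUITag extends Tag("org.apache.spark.tags.ChromeUITest")

// Individual tests could then opt in without tagging the whole suite:
//   test("tooltip text is escaped", ChromeUITag) { ... }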
core/src/test/scala/org/apache/spark/ui/ChromeUISeleniumSuite.scala (new file)

Lines changed: 49 additions & 0 deletions

@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.ui
+
+import org.openqa.selenium.WebDriver
+import org.openqa.selenium.chrome.{ChromeDriver, ChromeOptions}
+
+import org.apache.spark.tags.ChromeUITest
+
+/**
+ * Selenium tests for the Spark Web UI with Chrome.
+ */
+@ChromeUITest
+class ChromeUISeleniumSuite extends RealBrowserUISeleniumSuite("webdriver.chrome.driver") {
+
+  override var webDriver: WebDriver = _
+
+  override def beforeAll(): Unit = {
+    super.beforeAll()
+    val chromeOptions = new ChromeOptions
+    chromeOptions.addArguments("--headless", "--disable-gpu")
+    webDriver = new ChromeDriver(chromeOptions)
+  }
+
+  override def afterAll(): Unit = {
+    try {
+      if (webDriver != null) {
+        webDriver.quit()
+      }
+    } finally {
+      super.afterAll()
+    }
+  }
+}
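
This suite only runs when a ChromeDriver binary is advertised to the test JVM; the abstract base class (next file) copies the path from a `spark.test.`-prefixed system property and skips the suite otherwise. A minimal sketch of wiring that up, with a placeholder path:

// Placeholder path: point this at a locally installed ChromeDriver binary.
// RealBrowserUISeleniumSuite.beforeAll() skips the suite (via assume) when unset.
sys.props("spark.test.webdriver.chrome.driver") = "/usr/local/bin/chromedriver"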
core/src/test/scala/org/apache/spark/ui/RealBrowserUISeleniumSuite.scala (new file)

Lines changed: 109 additions & 0 deletions

@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.ui
+
+import org.openqa.selenium.{By, WebDriver}
+import org.scalatest._
+import org.scalatest.concurrent.Eventually._
+import org.scalatest.time.SpanSugar._
+import org.scalatestplus.selenium.WebBrowser
+
+import org.apache.spark._
+import org.apache.spark.LocalSparkContext.withSpark
+import org.apache.spark.internal.config.MEMORY_OFFHEAP_SIZE
+import org.apache.spark.internal.config.UI.{UI_ENABLED, UI_KILL_ENABLED, UI_PORT}
+import org.apache.spark.util.CallSite
+
+/**
+ * Selenium tests for the Spark Web UI with real web browsers.
+ */
+abstract class RealBrowserUISeleniumSuite(val driverProp: String)
+  extends SparkFunSuite with WebBrowser with Matchers with BeforeAndAfterAll {
+
+  implicit var webDriver: WebDriver
+  private val driverPropPrefix = "spark.test."
+
+  override def beforeAll(): Unit = {
+    super.beforeAll()
+    assume(
+      sys.props(driverPropPrefix + driverProp) !== null,
+      "System property " + driverPropPrefix + driverProp +
+        " should be set to the corresponding driver path.")
+    sys.props(driverProp) = sys.props(driverPropPrefix + driverProp)
+  }
+
+  override def afterAll(): Unit = {
+    sys.props.remove(driverProp)
+    super.afterAll()
+  }
+
+  test("SPARK-31534: text for tooltip should be escaped") {
+    withSpark(newSparkContext()) { sc =>
+      sc.setLocalProperty(CallSite.LONG_FORM, "collect at <console>:25")
+      sc.setLocalProperty(CallSite.SHORT_FORM, "collect at <console>:25")
+      sc.parallelize(1 to 10).collect
+
+      eventually(timeout(10.seconds), interval(50.milliseconds)) {
+        goToUi(sc, "/jobs")
+
+        val jobDesc =
+          webDriver.findElement(By.cssSelector("div[class='application-timeline-content']"))
+        jobDesc.getAttribute("data-title") should include ("collect at &lt;console&gt;:25")
+
+        goToUi(sc, "/jobs/job/?id=0")
+        webDriver.get(sc.ui.get.webUrl.stripSuffix("/") + "/jobs/job/?id=0")
+        val stageDesc = webDriver.findElement(By.cssSelector("div[class='job-timeline-content']"))
+        stageDesc.getAttribute("data-title") should include ("collect at &lt;console&gt;:25")
+
+        // Open DAG Viz.
+        webDriver.findElement(By.id("job-dag-viz")).click()
+        val nodeDesc = webDriver.findElement(By.cssSelector("g[class='node_0 node']"))
+        nodeDesc.getAttribute("name") should include ("collect at &lt;console&gt;:25")
+      }
+    }
+  }
+
+  /**
+   * Create a test SparkContext with the SparkUI enabled.
+   * It is safe to `get` the SparkUI directly from the SparkContext returned here.
+   */
+  private def newSparkContext(
+      killEnabled: Boolean = true,
+      master: String = "local",
+      additionalConfs: Map[String, String] = Map.empty): SparkContext = {
+    val conf = new SparkConf()
+      .setMaster(master)
+      .setAppName("test")
+      .set(UI_ENABLED, true)
+      .set(UI_PORT, 0)
+      .set(UI_KILL_ENABLED, killEnabled)
+      .set(MEMORY_OFFHEAP_SIZE.key, "64m")
+    additionalConfs.foreach { case (k, v) => conf.set(k, v) }
+    val sc = new SparkContext(conf)
+    assert(sc.ui.isDefined)
+    sc
+  }
+
+  def goToUi(sc: SparkContext, path: String): Unit = {
+    goToUi(sc.ui.get, path)
+  }
+
+  def goToUi(ui: SparkUI, path: String): Unit = {
+    go to (ui.webUrl.stripSuffix("/") + path)
+  }
+}
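
RealBrowserUISeleniumSuite is deliberately browser-agnostic: a subclass only supplies the driver system property name and a concrete WebDriver. As an illustration, a hypothetical Firefox sibling (not part of this commit; "webdriver.gecko.driver" is Selenium's standard geckodriver property) could look like:

package org.apache.spark.ui

import org.openqa.selenium.WebDriver
import org.openqa.selenium.firefox.FirefoxDriver

// Hypothetical suite demonstrating the extension point; it mirrors
// ChromeUISeleniumSuite above but drives Firefox through geckodriver.
class FirefoxUISeleniumSuite extends RealBrowserUISeleniumSuite("webdriver.gecko.driver") {

  override var webDriver: WebDriver = _

  override def beforeAll(): Unit = {
    super.beforeAll()
    webDriver = new FirefoxDriver()
  }

  override def afterAll(): Unit = {
    try {
      if (webDriver != null) {
        webDriver.quit()
      }
    } finally {
      super.afterAll()
    }
  }
}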

core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala

Lines changed: 0 additions & 27 deletions
@@ -773,33 +773,6 @@ class UISeleniumSuite extends SparkFunSuite with WebBrowser with Matchers with B
     }
   }

-  test("SPARK-31534: text for tooltip should be escaped") {
-    withSpark(newSparkContext()) { sc =>
-      sc.setLocalProperty(CallSite.LONG_FORM, "collect at <console>:25")
-      sc.setLocalProperty(CallSite.SHORT_FORM, "collect at <console>:25")
-      sc.parallelize(1 to 10).collect
-
-      val driver = webDriver.asInstanceOf[HtmlUnitDriver]
-      driver.setJavascriptEnabled(true)
-
-      eventually(timeout(10.seconds), interval(50.milliseconds)) {
-        goToUi(sc, "/jobs")
-        val jobDesc =
-          driver.findElement(By.cssSelector("div[class='application-timeline-content']"))
-        jobDesc.getAttribute("data-title") should include ("collect at &lt;console&gt;:25")
-
-        goToUi(sc, "/jobs/job/?id=0")
-        val stageDesc = driver.findElement(By.cssSelector("div[class='job-timeline-content']"))
-        stageDesc.getAttribute("data-title") should include ("collect at &lt;console&gt;:25")
-
-        // Open DAG Viz.
-        driver.findElement(By.id("job-dag-viz")).click()
-        val nodeDesc = driver.findElement(By.cssSelector("g[class='node_0 node']"))
-        nodeDesc.getAttribute("name") should include ("collect at &lt;console&gt;:25")
-      }
-    }
-  }
-
   def goToUi(sc: SparkContext, path: String): Unit = {
     goToUi(sc.ui.get, path)
   }

dev/deps/spark-deps-hadoop-2.7-hive-1.2

Lines changed: 2 additions & 2 deletions
@@ -35,7 +35,7 @@ commons-beanutils/1.9.4//commons-beanutils-1.9.4.jar
 commons-cli/1.2//commons-cli-1.2.jar
 commons-codec/1.10//commons-codec-1.10.jar
 commons-collections/3.2.2//commons-collections-3.2.2.jar
-commons-compiler/3.0.16//commons-compiler-3.0.16.jar
+commons-compiler/3.1.2//commons-compiler-3.1.2.jar
 commons-compress/1.8.1//commons-compress-1.8.1.jar
 commons-configuration/1.6//commons-configuration-1.6.jar
 commons-crypto/1.0.0//commons-crypto-1.0.0.jar
@@ -106,7 +106,7 @@ jakarta.inject/2.6.1//jakarta.inject-2.6.1.jar
 jakarta.validation-api/2.0.2//jakarta.validation-api-2.0.2.jar
 jakarta.ws.rs-api/2.1.6//jakarta.ws.rs-api-2.1.6.jar
 jakarta.xml.bind-api/2.3.2//jakarta.xml.bind-api-2.3.2.jar
-janino/3.0.16//janino-3.0.16.jar
+janino/3.1.2//janino-3.1.2.jar
 javassist/3.25.0-GA//javassist-3.25.0-GA.jar
 javax.inject/1//javax.inject-1.jar
 javax.servlet-api/3.1.0//javax.servlet-api-3.1.0.jar

dev/deps/spark-deps-hadoop-2.7-hive-2.3

Lines changed: 2 additions & 2 deletions
@@ -33,7 +33,7 @@ commons-beanutils/1.9.4//commons-beanutils-1.9.4.jar
 commons-cli/1.2//commons-cli-1.2.jar
 commons-codec/1.10//commons-codec-1.10.jar
 commons-collections/3.2.2//commons-collections-3.2.2.jar
-commons-compiler/3.0.16//commons-compiler-3.0.16.jar
+commons-compiler/3.1.2//commons-compiler-3.1.2.jar
 commons-compress/1.8.1//commons-compress-1.8.1.jar
 commons-configuration/1.6//commons-configuration-1.6.jar
 commons-crypto/1.0.0//commons-crypto-1.0.0.jar
@@ -119,7 +119,7 @@ jakarta.inject/2.6.1//jakarta.inject-2.6.1.jar
 jakarta.validation-api/2.0.2//jakarta.validation-api-2.0.2.jar
 jakarta.ws.rs-api/2.1.6//jakarta.ws.rs-api-2.1.6.jar
 jakarta.xml.bind-api/2.3.2//jakarta.xml.bind-api-2.3.2.jar
-janino/3.0.16//janino-3.0.16.jar
+janino/3.1.2//janino-3.1.2.jar
 javassist/3.25.0-GA//javassist-3.25.0-GA.jar
 javax.inject/1//javax.inject-1.jar
 javax.jdo/3.2.0-m3//javax.jdo-3.2.0-m3.jar

dev/deps/spark-deps-hadoop-3.2-hive-2.3

Lines changed: 3 additions & 3 deletions
@@ -30,14 +30,14 @@ commons-beanutils/1.9.4//commons-beanutils-1.9.4.jar
 commons-cli/1.2//commons-cli-1.2.jar
 commons-codec/1.10//commons-codec-1.10.jar
 commons-collections/3.2.2//commons-collections-3.2.2.jar
-commons-compiler/3.0.16//commons-compiler-3.0.16.jar
+commons-compiler/3.1.2//commons-compiler-3.1.2.jar
 commons-compress/1.8.1//commons-compress-1.8.1.jar
 commons-configuration2/2.1.1//commons-configuration2-2.1.1.jar
 commons-crypto/1.0.0//commons-crypto-1.0.0.jar
 commons-daemon/1.0.13//commons-daemon-1.0.13.jar
 commons-dbcp/1.4//commons-dbcp-1.4.jar
 commons-httpclient/3.1//commons-httpclient-3.1.jar
-commons-io/2.4//commons-io-2.4.jar
+commons-io/2.5//commons-io-2.5.jar
 commons-lang/2.6//commons-lang-2.6.jar
 commons-lang3/3.9//commons-lang3-3.9.jar
 commons-logging/1.1.3//commons-logging-1.1.3.jar
@@ -118,7 +118,7 @@ jakarta.inject/2.6.1//jakarta.inject-2.6.1.jar
 jakarta.validation-api/2.0.2//jakarta.validation-api-2.0.2.jar
 jakarta.ws.rs-api/2.1.6//jakarta.ws.rs-api-2.1.6.jar
 jakarta.xml.bind-api/2.3.2//jakarta.xml.bind-api-2.3.2.jar
-janino/3.0.16//janino-3.0.16.jar
+janino/3.1.2//janino-3.1.2.jar
 javassist/3.25.0-GA//javassist-3.25.0-GA.jar
 javax.inject/1//javax.inject-1.jar
 javax.jdo/3.2.0-m3//javax.jdo-3.2.0-m3.jar

docs/_data/menu-sql.yaml

Lines changed: 4 additions & 4 deletions
@@ -171,22 +171,22 @@
   url: sql-ref-syntax-qry-select-limit.html
 - text: Common Table Expression
   url: sql-ref-syntax-qry-select-cte.html
+- text: Hints
+  url: sql-ref-syntax-qry-select-hints.html
 - text: Inline Table
   url: sql-ref-syntax-qry-select-inline-table.html
 - text: JOIN
   url: sql-ref-syntax-qry-select-join.html
-- text: Join Hints
-  url: sql-ref-syntax-qry-select-hints.html
 - text: LIKE Predicate
   url: sql-ref-syntax-qry-select-like.html
 - text: Set Operators
   url: sql-ref-syntax-qry-select-setops.html
 - text: TABLESAMPLE
-  url: sql-ref-syntax-qry-sampling.html
+  url: sql-ref-syntax-qry-select-sampling.html
 - text: Table-valued Function
   url: sql-ref-syntax-qry-select-tvf.html
 - text: Window Function
-  url: sql-ref-syntax-qry-window.html
+  url: sql-ref-syntax-qry-select-window.html
 - text: EXPLAIN
   url: sql-ref-syntax-qry-explain.html
 - text: Auxiliary Statements

docs/sql-performance-tuning.md

Lines changed: 3 additions & 1 deletion
@@ -179,7 +179,7 @@ SELECT /*+ BROADCAST(r) */ * FROM records r JOIN src s ON r.key = s.key
 </div>
 </div>

-For more details please refer to the documentation of [Join Hints](sql-ref-syntax-qry-select-hints.html).
+For more details please refer to the documentation of [Join Hints](sql-ref-syntax-qry-select-hints.html#join-hints).

 ## Coalesce Hints for SQL Queries

@@ -196,6 +196,8 @@ The "REPARTITION_BY_RANGE" hint must have column names and a partition number is
 SELECT /*+ REPARTITION_BY_RANGE(c) */ * FROM t
 SELECT /*+ REPARTITION_BY_RANGE(3, c) */ * FROM t

+For more details please refer to the documentation of [Partitioning Hints](sql-ref-syntax-qry-select-hints.html#partitioning-hints).
+
 ## Adaptive Query Execution
 Adaptive Query Execution (AQE) is an optimization technique in Spark SQL that makes use of the runtime statistics to choose the most efficient query execution plan. AQE is disabled by default. Spark SQL can use the umbrella configuration of `spark.sql.adaptive.enabled` to control whether turn it on/off. As of Spark 3.0, there are three major features in AQE, including coalescing post-shuffle partitions, converting sort-merge join to broadcast join, and skew join optimization.
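
The newly linked partitioning hints are directly checkable from Scala; a minimal sketch (the view name t and column c are illustrative, mirroring the doc's SQL):

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.col

val spark = SparkSession.builder().master("local[2]").appName("hint-demo").getOrCreate()
spark.range(100).withColumn("c", col("id") % 10).createOrReplaceTempView("t")

// Range-partition the result into 3 partitions by column c, as in the doc.
val repartitioned = spark.sql("SELECT /*+ REPARTITION_BY_RANGE(3, c) */ * FROM t")
repartitioned.rdd.getNumPartitions  // expected: 3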

docs/sql-ref-datetime-pattern.md

Lines changed: 1 addition & 1 deletion
@@ -74,7 +74,7 @@ The count of pattern letters determines the format.
   For formatting, the fraction length would be padded to the number of contiguous 'S' with zeros.
   Spark supports datetime of micro-of-second precision, which has up to 6 significant digits, but can parse nano-of-second with exceeded part truncated.

-- Year: The count of letters determines the minimum field width below which padding is used. If the count of letters is two, then a reduced two digit form is used. For printing, this outputs the rightmost two digits. For parsing, this will parse using the base value of 2000, resulting in a year within the range 2000 to 2099 inclusive. If the count of letters is less than four (but not two), then the sign is only output for negative years. Otherwise, the sign is output if the pad width is exceeded when 'G' is not present.
+- Year: The count of letters determines the minimum field width below which padding is used. If the count of letters is two, then a reduced two digit form is used. For printing, this outputs the rightmost two digits. For parsing, this will parse using the base value of 2000, resulting in a year within the range 2000 to 2099 inclusive. If the count of letters is less than four (but not two), then the sign is only output for negative years. Otherwise, the sign is output if the pad width is exceeded when 'G' is not present. 11 or more letters will fail.

 - Month: It follows the rule of Number/Text. The text form is depend on letters - 'M' denotes the 'standard' form, and 'L' is for 'stand-alone' form. These two forms are different only in some certain languages. For example, in Russian, 'Июль' is the stand-alone form of July, and 'Июля' is the standard form. Here are examples for all supported pattern letters:
 - `'M'` or `'L'`: Month number in a year starting from 1. There is no difference between 'M' and 'L'. Month from 1 to 9 are printed without padding.
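
The amended two-letter-year rule is easy to exercise; a small sketch (expected outputs assume the Spark 3.0 behavior the paragraph describes):

import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local").appName("year-pattern").getOrCreate()

// Formatting with 'yy' prints the rightmost two digits of the year.
spark.sql("SELECT date_format(date'2020-05-01', 'yy')").show()  // expected: 20

// Parsing with 'yy' uses base year 2000, so '99' resolves into 2000-2099.
spark.sql("SELECT to_date('99', 'yy')").show()                  // expected: 2099-01-01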
