
Commit 197dc34

Revert "[SPARK-37600][BUILD] Upgrade to Hadoop 3.3.2" (apache#565)
This reverts commit 4da04fc.
1 parent 4c60a77 commit 197dc34

File tree: 11 files changed, +144 / -93 lines


LICENSE-binary

Lines changed: 0 additions & 2 deletions
@@ -457,7 +457,6 @@ net.sf.py4j:py4j
 org.jpmml:pmml-model
 org.jpmml:pmml-schema
 org.threeten:threeten-extra
-org.jdom:jdom2
 
 python/lib/py4j-*-src.zip
 python/pyspark/cloudpickle.py
@@ -506,7 +505,6 @@ Common Development and Distribution License (CDDL) 1.0
 javax.activation:activation http://www.oracle.com/technetwork/java/javase/tech/index-jsp-138795.html
 javax.xml.stream:stax-api https://jcp.org/en/jsr/detail?id=173
 javax.transaction:javax.transaction-api
-javax.xml.bind:jaxb-api
 
 
 Common Development and Distribution License (CDDL) 1.1

NOTICE-binary

Lines changed: 0 additions & 3 deletions
@@ -917,9 +917,6 @@ This product includes code (JaspellTernarySearchTrie) from Java Spelling Checkin
 g Package (jaspell): http://jaspell.sourceforge.net/
 License: The BSD License (http://www.opensource.org/licenses/bsd-license.php)
 
-This product includes software developed by the JDOM Project (http://www.jdom.org/)
-License: https://raw.githubusercontent.com/hunterhacker/jdom/master/LICENSE.txt
-
 The snowball stemmers in
 analysis/common/src/java/net/sf/snowball
 were developed by Martin Porter and Richard Boulton.
LZ4Compressor.java (new file)

Lines changed: 37 additions & 0 deletions

@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.shaded.net.jpountz.lz4;
+
+/**
+ * TODO(SPARK-36679): A temporary workaround for SPARK-36669. We should remove this after
+ * Hadoop 3.3.2 release which fixes the LZ4 relocation in shaded Hadoop client libraries.
+ * This does not need implement all net.jpountz.lz4.LZ4Compressor API, just the ones used
+ * by Hadoop Lz4Compressor.
+ */
+public final class LZ4Compressor {
+
+  private net.jpountz.lz4.LZ4Compressor lz4Compressor;
+
+  public LZ4Compressor(net.jpountz.lz4.LZ4Compressor lz4Compressor) {
+    this.lz4Compressor = lz4Compressor;
+  }
+
+  public void compress(java.nio.ByteBuffer src, java.nio.ByteBuffer dest) {
+    lz4Compressor.compress(src, dest);
+  }
+}
LZ4Factory.java (new file)

Lines changed: 49 additions & 0 deletions

@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.shaded.net.jpountz.lz4;
+
+/**
+ * TODO(SPARK-36679): A temporary workaround for SPARK-36669. We should remove this after
+ * Hadoop 3.3.2 release which fixes the LZ4 relocation in shaded Hadoop client libraries.
+ * This does not need implement all net.jpountz.lz4.LZ4Factory API, just the ones used by
+ * Hadoop Lz4Compressor.
+ */
+public final class LZ4Factory {
+
+  private net.jpountz.lz4.LZ4Factory lz4Factory;
+
+  public LZ4Factory(net.jpountz.lz4.LZ4Factory lz4Factory) {
+    this.lz4Factory = lz4Factory;
+  }
+
+  public static LZ4Factory fastestInstance() {
+    return new LZ4Factory(net.jpountz.lz4.LZ4Factory.fastestInstance());
+  }
+
+  public LZ4Compressor highCompressor() {
+    return new LZ4Compressor(lz4Factory.highCompressor());
+  }
+
+  public LZ4Compressor fastCompressor() {
+    return new LZ4Compressor(lz4Factory.fastCompressor());
+  }
+
+  public LZ4SafeDecompressor safeDecompressor() {
+    return new LZ4SafeDecompressor(lz4Factory.safeDecompressor());
+  }
+}
LZ4SafeDecompressor.java (new file)

Lines changed: 36 additions & 0 deletions

@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.shaded.net.jpountz.lz4;
+
+/**
+ * TODO(SPARK-36679): A temporary workaround for SPARK-36669. We should remove this after
+ * Hadoop 3.3.2 release which fixes the LZ4 relocation in shaded Hadoop client libraries.
+ * This does not need implement all net.jpountz.lz4.LZ4SafeDecompressor API, just the ones
+ * used by Hadoop Lz4Decompressor.
+ */
+public final class LZ4SafeDecompressor {
+  private net.jpountz.lz4.LZ4SafeDecompressor lz4Decompressor;
+
+  public LZ4SafeDecompressor(net.jpountz.lz4.LZ4SafeDecompressor lz4Decompressor) {
+    this.lz4Decompressor = lz4Decompressor;
+  }
+
+  public void decompress(java.nio.ByteBuffer src, java.nio.ByteBuffer dest) {
+    lz4Decompressor.decompress(src, dest);
+  }
+}
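
Note: the three classes above are thin pass-through shims. They sit in Hadoop's shaded package name but simply delegate to the real lz4-java classes, which is what keeps the shaded Lz4 codec in Hadoop 3.3.1 working (SPARK-36669) once this revert drops back from 3.3.2. The sketch below is illustrative only and is not part of this commit; it assumes the shims are compiled together with the lz4-java dependency, and the class name ShimRoundTripExample and the buffer sizing are made up for the example.

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.shaded.net.jpountz.lz4.LZ4Compressor;
import org.apache.hadoop.shaded.net.jpountz.lz4.LZ4Factory;
import org.apache.hadoop.shaded.net.jpountz.lz4.LZ4SafeDecompressor;

// Hypothetical example class, not part of the commit.
public final class ShimRoundTripExample {
  public static void main(String[] args) {
    byte[] input = "hello, shaded lz4".getBytes(StandardCharsets.UTF_8);

    // Obtain compressor/decompressor through the shaded shim factory; each call delegates to lz4-java.
    LZ4Factory factory = LZ4Factory.fastestInstance();
    LZ4Compressor compressor = factory.fastCompressor();
    LZ4SafeDecompressor decompressor = factory.safeDecompressor();

    // Compress into a generously sized buffer (LZ4 worst case is only slightly larger than the input).
    ByteBuffer src = ByteBuffer.wrap(input);
    ByteBuffer compressed = ByteBuffer.allocate(input.length + 64);
    compressor.compress(src, compressed);
    compressed.flip();

    // Decompress back; the destination must hold the original byte count.
    ByteBuffer restored = ByteBuffer.allocate(input.length);
    decompressor.decompress(compressed, restored);
    restored.flip();

    System.out.println(StandardCharsets.UTF_8.decode(restored)); // prints "hello, shaded lz4"
  }
}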

dev/deps/spark-deps-hadoop-3-hive-2.3

Lines changed: 19 additions & 19 deletions
@@ -6,10 +6,11 @@ ST4/4.0.4//ST4-4.0.4.jar
 activation/1.1.1//activation-1.1.1.jar
 aircompressor/0.21//aircompressor-0.21.jar
 algebra_2.12/2.0.1//algebra_2.12-2.0.1.jar
-aliyun-java-sdk-core/4.5.10//aliyun-java-sdk-core-4.5.10.jar
-aliyun-java-sdk-kms/2.11.0//aliyun-java-sdk-kms-2.11.0.jar
-aliyun-java-sdk-ram/3.1.0//aliyun-java-sdk-ram-3.1.0.jar
-aliyun-sdk-oss/3.13.0//aliyun-sdk-oss-3.13.0.jar
+aliyun-java-sdk-core/3.4.0//aliyun-java-sdk-core-3.4.0.jar
+aliyun-java-sdk-ecs/4.2.0//aliyun-java-sdk-ecs-4.2.0.jar
+aliyun-java-sdk-ram/3.0.0//aliyun-java-sdk-ram-3.0.0.jar
+aliyun-java-sdk-sts/3.0.0//aliyun-java-sdk-sts-3.0.0.jar
+aliyun-sdk-oss/3.4.1//aliyun-sdk-oss-3.4.1.jar
 annotations/17.0.0//annotations-17.0.0.jar
 antlr-runtime/3.5.2//antlr-runtime-3.5.2.jar
 antlr4-runtime/4.8//antlr4-runtime-4.8.jar
@@ -25,7 +26,7 @@ automaton/1.11-8//automaton-1.11-8.jar
 avro-ipc/1.11.0//avro-ipc-1.11.0.jar
 avro-mapred/1.11.0//avro-mapred-1.11.0.jar
 avro/1.11.0//avro-1.11.0.jar
-aws-java-sdk-bundle/1.11.1026//aws-java-sdk-bundle-1.11.1026.jar
+aws-java-sdk-bundle/1.11.901//aws-java-sdk-bundle-1.11.901.jar
 azure-data-lake-store-sdk/2.3.9//azure-data-lake-store-sdk-2.3.9.jar
 azure-keyvault-core/1.0.0//azure-keyvault-core-1.0.0.jar
 azure-storage/7.0.1//azure-storage-7.0.1.jar
@@ -67,18 +68,18 @@ generex/1.0.2//generex-1.0.2.jar
 gmetric4j/1.0.10//gmetric4j-1.0.10.jar
 gson/2.2.4//gson-2.2.4.jar
 guava/14.0.1//guava-14.0.1.jar
-hadoop-aliyun/3.3.2//hadoop-aliyun-3.3.2.jar
-hadoop-annotations/3.3.2//hadoop-annotations-3.3.2.jar
-hadoop-aws/3.3.2//hadoop-aws-3.3.2.jar
-hadoop-azure-datalake/3.3.2//hadoop-azure-datalake-3.3.2.jar
-hadoop-azure/3.3.2//hadoop-azure-3.3.2.jar
-hadoop-client-api/3.3.2//hadoop-client-api-3.3.2.jar
-hadoop-client-runtime/3.3.2//hadoop-client-runtime-3.3.2.jar
-hadoop-cloud-storage/3.3.2//hadoop-cloud-storage-3.3.2.jar
-hadoop-cos/3.3.2//hadoop-cos-3.3.2.jar
-hadoop-openstack/3.3.2//hadoop-openstack-3.3.2.jar
+hadoop-aliyun/3.3.1//hadoop-aliyun-3.3.1.jar
+hadoop-annotations/3.3.1//hadoop-annotations-3.3.1.jar
+hadoop-aws/3.3.1//hadoop-aws-3.3.1.jar
+hadoop-azure-datalake/3.3.1//hadoop-azure-datalake-3.3.1.jar
+hadoop-azure/3.3.1//hadoop-azure-3.3.1.jar
+hadoop-client-api/3.3.1//hadoop-client-api-3.3.1.jar
+hadoop-client-runtime/3.3.1//hadoop-client-runtime-3.3.1.jar
+hadoop-cloud-storage/3.3.1//hadoop-cloud-storage-3.3.1.jar
+hadoop-cos/3.3.1//hadoop-cos-3.3.1.jar
+hadoop-openstack/3.3.1//hadoop-openstack-3.3.1.jar
 hadoop-shaded-guava/1.1.1//hadoop-shaded-guava-1.1.1.jar
-hadoop-yarn-server-web-proxy/3.3.2//hadoop-yarn-server-web-proxy-3.3.2.jar
+hadoop-yarn-server-web-proxy/3.3.1//hadoop-yarn-server-web-proxy-3.3.1.jar
 hive-beeline/2.3.9//hive-beeline-2.3.9.jar
 hive-cli/2.3.9//hive-cli-2.3.9.jar
 hive-common/2.3.9//hive-common-2.3.9.jar
@@ -98,9 +99,9 @@ hive-contrib/1.1.0-cdh5.16.2//hive-contrib-1.1.0-cdh5.16.2.jar
 hk2-api/2.6.1//hk2-api-2.6.1.jar
 hk2-locator/2.6.1//hk2-locator-2.6.1.jar
 hk2-utils/2.6.1//hk2-utils-2.6.1.jar
+htrace-core4/4.1.0-incubating//htrace-core4-4.1.0-incubating.jar
 httpclient/4.5.13//httpclient-4.5.13.jar
 httpcore/4.4.14//httpcore-4.4.14.jar
-ini4j/0.5.4//ini4j-0.5.4.jar
 istack-commons-runtime/3.0.8//istack-commons-runtime-3.0.8.jar
 ivy/2.5.0//ivy-2.5.0.jar
 jackson-annotations/2.13.3//jackson-annotations-2.13.3.jar
@@ -122,11 +123,10 @@ janino/3.0.16//janino-3.0.16.jar
 javassist/3.25.0-GA//javassist-3.25.0-GA.jar
 javax.jdo/3.2.0-m3//javax.jdo-3.2.0-m3.jar
 javolution/5.5.1//javolution-5.5.1.jar
-jaxb-api/2.2.11//jaxb-api-2.2.11.jar
 jaxb-runtime/2.3.2//jaxb-runtime-2.3.2.jar
 jcl-over-slf4j/1.7.32//jcl-over-slf4j-1.7.32.jar
 jdo-api/3.0.1//jdo-api-3.0.1.jar
-jdom2/2.0.6//jdom2-2.0.6.jar
+jdom/1.1//jdom-1.1.jar
 jersey-client/2.34//jersey-client-2.34.jar
 jersey-common/2.34//jersey-common-2.34.jar
 jersey-container-servlet-core/2.34//jersey-container-servlet-core-2.34.jar

hadoop-cloud/pom.xml

Lines changed: 0 additions & 7 deletions
@@ -267,13 +267,6 @@
           <groupId>com.google.guava</groupId>
           <artifactId>guava</artifactId>
         </exclusion>
-        <exclusion>
-          <!--
-           This is a code coverage library introduced by aliyun-java-sdk-core, only for testing
-          -->
-          <groupId>org.jacoco</groupId>
-          <artifactId>org.jacoco.agent</artifactId>
-        </exclusion>
       </exclusions>
     </dependency>
     <!--

licenses/LICENSE-jdom.txt

Lines changed: 0 additions & 54 deletions
This file was deleted.

pom.xml

Lines changed: 1 addition & 2 deletions
@@ -116,7 +116,7 @@
     <slf4j.version>1.7.30</slf4j.version>
     <log4j.version>2.18.0</log4j.version>
     <!-- make sure to update IsolatedClientLoader whenever this version is changed -->
-    <hadoop.version>3.3.2</hadoop.version>
+    <hadoop.version>3.3.1</hadoop.version>
     <gson.version>2.8.9</gson.version>
     <protobuf.version>3.16.1</protobuf.version>
     <yarn.version>${hadoop.version}</yarn.version>
@@ -3470,7 +3470,6 @@
     <profile>
       <id>hadoop-2</id>
       <properties>
-        <!-- make sure to update IsolatedClientLoader whenever this version is changed -->
         <hadoop.version>2.7.4</hadoop.version>
         <curator.version>2.7.1</curator.version>
         <commons-io.version>2.8.0</commons-io.version>

project/MimaExcludes.scala

Lines changed: 1 addition & 5 deletions
@@ -55,11 +55,7 @@ object MimaExcludes {
     // [SPARK-37831][CORE] Add task partition id in TaskInfo and Task Metrics
     ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.status.api.v1.TaskData.this"),
 
-    // [SPARK-37600][BUILD] Upgrade to Hadoop 3.3.2
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.hadoop.shaded.net.jpountz.lz4.LZ4Compressor"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.hadoop.shaded.net.jpountz.lz4.LZ4Factory"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.hadoop.shaded.net.jpountz.lz4.LZ4SafeDecompressor"),
-
+
     // [SPARK-37377][SQL] Initial implementation of Storage-Partitioned Join
     ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.connector.read.partitioning.ClusteredDistribution"),
     ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.connector.read.partitioning.Distribution"),
