Skip to content

Commit 573f07d

Browse files
zjffdu authored and HyukjinKwon committed
add test for spark 2.4 (#1)
1 parent 9ac1797 commit 573f07d

File tree

5 files changed

+89
-33
lines changed

5 files changed

+89
-33
lines changed

.travis.yml

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ sudo: false
2020

2121
before_cache:
2222
- sudo chown -R travis:travis $HOME/.m2
23-
23+
2424
cache:
2525
apt: true
2626
directories:
@@ -98,15 +98,15 @@ matrix:
9898
dist: trusty
9999
env: BUILD_PLUGINS="true" PYTHON="3" SCALA_VER="2.10" PROFILE="-Pspark-1.6 -Pscala-2.10" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl zeppelin-zengine,spark/interpreter,spark/spark-dependencies" TEST_PROJECTS="-Dtest=SparkIntegrationTest,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
100100

101-
# Test spark module for 2.1.0 with scala 2.11
101+
# Test spark module for 2.4.0 with scala 2.11
102102
- jdk: "oraclejdk8"
103103
dist: trusty
104-
env: BUILD_PLUGINS="false" PYTHON="2" SCALA_VER="2.11" PROFILE="-Pspark-2.1 -Phadoop2 -Pscala-2.11" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl spark/interpreter,spark/spark-dependencies" TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.*,org.apache.zeppelin.rinterpreter.*,org.apache.spark.api.r.* -DfailIfNoTests=false"
104+
env: BUILD_PLUGINS="false" PYTHON="2" SCALA_VER="2.11" PROFILE="-Pspark-2.4 -Phadoop2 -Pscala-2.11" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl spark/interpreter,spark/spark-dependencies" TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.*,org.apache.zeppelin.rinterpreter.*,org.apache.spark.api.r.* -DfailIfNoTests=false"
105105

106-
# Test spark module for 2.0.2 with scala 2.11
106+
# Test spark module for 2.3.2 with scala 2.11
107107
- jdk: "oraclejdk8"
108108
dist: trusty
109-
env: BUILD_PLUGINS="false" PYTHON="2" SCALA_VER="2.11" PROFILE="-Pspark-2.0 -Phadoop3 -Pscala-2.11" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl spark/interpreter,spark/spark-dependencies" TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.*,org.apache.zeppelin.rinterpreter.*,org.apache.spark.api.r.* -DfailIfNoTests=false"
109+
env: BUILD_PLUGINS="false" PYTHON="2" SCALA_VER="2.11" PROFILE="-Pspark-2.3 -Phadoop3 -Pscala-2.11" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl spark/interpreter,spark/spark-dependencies" TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.*,org.apache.zeppelin.rinterpreter.*,org.apache.spark.api.r.* -DfailIfNoTests=false"
110110

111111
# Test python/pyspark with python 2, livy 0.5
112112
- sudo: required

spark/interpreter/pom.xml

Lines changed: 48 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,7 @@
5353
<pyspark.test.exclude>**/PySparkInterpreterMatplotlibTest.java</pyspark.test.exclude>
5454
<pyspark.test.include>**/*Test.*</pyspark.test.include>
5555

56-
56+
5757
</properties>
5858

5959
<dependencies>
@@ -69,12 +69,6 @@
6969
<version>${project.version}</version>
7070
</dependency>
7171

72-
<dependency>
73-
<groupId>org.apache.zeppelin</groupId>
74-
<artifactId>spark-scala-2.10</artifactId>
75-
<version>${project.version}</version>
76-
</dependency>
77-
7872
<dependency>
7973
<groupId>org.apache.zeppelin</groupId>
8074
<artifactId>zeppelin-interpreter-api</artifactId>
@@ -609,4 +603,51 @@
609603
</plugins>
610604
</build>
611605

606+
<profiles>
607+
608+
<profile>
609+
<id>spark-2.2</id>
610+
<dependencies>
611+
<dependency>
612+
<groupId>org.apache.zeppelin</groupId>
613+
<artifactId>spark-scala-2.10</artifactId>
614+
<version>${project.version}</version>
615+
</dependency>
616+
</dependencies>
617+
</profile>
618+
619+
<profile>
620+
<id>spark-2.1</id>
621+
<dependencies>
622+
<dependency>
623+
<groupId>org.apache.zeppelin</groupId>
624+
<artifactId>spark-scala-2.10</artifactId>
625+
<version>${project.version}</version>
626+
</dependency>
627+
</dependencies>
628+
</profile>
629+
630+
<profile>
631+
<id>spark-2.0</id>
632+
<dependencies>
633+
<dependency>
634+
<groupId>org.apache.zeppelin</groupId>
635+
<artifactId>spark-scala-2.10</artifactId>
636+
<version>${project.version}</version>
637+
</dependency>
638+
</dependencies>
639+
</profile>
640+
641+
<profile>
642+
<id>spark-1.6</id>
643+
<dependencies>
644+
<dependency>
645+
<groupId>org.apache.zeppelin</groupId>
646+
<artifactId>spark-scala-2.10</artifactId>
647+
<version>${project.version}</version>
648+
</dependency>
649+
</dependencies>
650+
</profile>
651+
652+
</profiles>
612653
</project>

spark/pom.xml

Lines changed: 23 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -49,17 +49,16 @@
4949

5050
<spark.archive>spark-${spark.version}</spark.archive>
5151
<spark.src.download.url>
52-
http://d3kbcqa49mib13.cloudfront.net/${spark.archive}.tgz
52+
https://archive.apache.org/dist/spark/${spark.archive}/${spark.archive}.tgz
5353
</spark.src.download.url>
5454
<spark.bin.download.url>
55-
http://d3kbcqa49mib13.cloudfront.net/${spark.archive}-bin-without-hadoop.tgz
55+
https://archive.apache.org/dist/spark/${spark.archive}/${spark.archive}-bin-without-hadoop.tgz
5656
</spark.bin.download.url>
5757
</properties>
5858

5959
<modules>
6060
<module>interpreter</module>
6161
<module>spark-scala-parent</module>
62-
<module>scala-2.10</module>
6362
<module>scala-2.11</module>
6463
<module>spark-dependencies</module>
6564
<module>spark-shims</module>
@@ -197,36 +196,42 @@
197196
<properties>
198197
<spark.version>2.4.0</spark.version>
199198
<protobuf.version>2.5.0</protobuf.version>
200-
<spark.py4j.version>0.10.7</spark.py4j.version>
199+
<py4j.version>0.10.7</py4j.version>
201200
</properties>
202201
</profile>
203202

204203
<profile>
205204
<id>spark-2.3</id>
206205
<properties>
207-
<spark.version>2.3.0</spark.version>
206+
<spark.version>2.3.2</spark.version>
208207
<protobuf.version>2.5.0</protobuf.version>
209-
<spark.py4j.version>0.10.6</spark.py4j.version>
208+
<py4j.version>0.10.6</py4j.version>
210209
</properties>
211210
</profile>
212211

213212
<profile>
214213
<id>spark-2.2</id>
215-
<activation>
216-
<activeByDefault>true</activeByDefault>
217-
</activation>
218214
<properties>
219-
<spark.version>2.2.0</spark.version>
215+
<spark.version>2.2.1</spark.version>
220216
<py4j.version>0.10.4</py4j.version>
221217
</properties>
218+
<activation>
219+
<activeByDefault>true</activeByDefault>
220+
</activation>
221+
<modules>
222+
<module>scala-2.10</module>
223+
</modules>
222224
</profile>
223225

224226
<profile>
225227
<id>spark-2.1</id>
226228
<properties>
227-
<spark.version>2.1.0</spark.version>
229+
<spark.version>2.1.2</spark.version>
228230
<py4j.version>0.10.4</py4j.version>
229231
</properties>
232+
<modules>
233+
<module>scala-2.10</module>
234+
</modules>
230235
</profile>
231236

232237
<profile>
@@ -235,6 +240,9 @@
235240
<spark.version>2.0.2</spark.version>
236241
<py4j.version>0.10.3</py4j.version>
237242
</properties>
243+
<modules>
244+
<module>scala-2.10</module>
245+
</modules>
238246
</profile>
239247

240248
<profile>
@@ -243,7 +251,10 @@
243251
<spark.version>1.6.3</spark.version>
244252
<py4j.version>0.9</py4j.version>
245253
</properties>
254+
<modules>
255+
<module>scala-2.10</module>
256+
</modules>
246257
</profile>
247-
258+
248259
</profiles>
249260
</project>

zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinSparkClusterTest.java

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -64,7 +64,7 @@ public class ZeppelinSparkClusterTest extends AbstractTestRestApi {
6464
//ci timeout.
6565
//TODO(zjffdu) remove this after we upgrade it to junit 4.13 (ZEPPELIN-3341)
6666
private static Set<String> verifiedSparkVersions = new HashSet<>();
67-
67+
6868

6969
private String sparkVersion;
7070
private AuthenticationInfo anonymous = new AuthenticationInfo("anonymous");
@@ -83,10 +83,12 @@ public ZeppelinSparkClusterTest(String sparkVersion) throws Exception {
8383
@Parameterized.Parameters
8484
public static List<Object[]> data() {
8585
return Arrays.asList(new Object[][]{
86-
{"2.2.1"},
87-
{"2.1.2"},
88-
{"2.0.2"},
89-
{"1.6.3"}
86+
{"2.4.0"},
87+
{"2.3.2"},
88+
{"2.2.1"},
89+
{"2.1.2"},
90+
{"2.0.2"},
91+
{"1.6.3"}
9092
});
9193
}
9294

zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/SparkIntegrationTest.java

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -42,10 +42,12 @@ public SparkIntegrationTest(String sparkVersion) {
4242
@Parameterized.Parameters
4343
public static List<Object[]> data() {
4444
return Arrays.asList(new Object[][]{
45-
{"2.2.1"},
46-
{"2.1.2"},
47-
{"2.0.2"},
48-
{"1.6.3"}
45+
{"2.4.0"},
46+
{"2.3.2"},
47+
{"2.2.1"},
48+
{"2.1.2"},
49+
{"2.0.2"},
50+
{"1.6.3"}
4951
});
5052
}
5153

0 commit comments

Comments (0)