
Commit 318ecc8

make sure Spark is installed

1 parent 9f523d3
21 files changed: +63 -0 lines changed
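Every one of the 21 test files receives the same three-line guard ahead of its first call into Spark. The snippet added near the top of each file (repeated in every hunk below) is:

# Ensure Spark is installed
sparkCheckInstall(Sys.getenv("SPARK_HOME"), "local", "")

With a "local" master and an empty deploy mode, the helper checks SPARK_HOME and is expected to install Spark when no usable installation is found; sparkCheckInstall itself is defined elsewhere in the SparkR package and is not part of this diff.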

R/pkg/inst/tests/testthat/test_Serde.R

Lines changed: 3 additions & 0 deletions

@@ -17,6 +17,9 @@
 
 context("SerDe functionality")
 
+# Ensure Spark is installed
+sparkCheckInstall(Sys.getenv("SPARK_HOME"), "local", "")
+
 sparkSession <- sparkR.session(enableHiveSupport = FALSE)
 
 test_that("SerDe of primitive types", {

R/pkg/inst/tests/testthat/test_binaryFile.R

Lines changed: 3 additions & 0 deletions

@@ -17,6 +17,9 @@
 
 context("functions on binary files")
 
+# Ensure Spark is installed
+sparkCheckInstall(Sys.getenv("SPARK_HOME"), "local", "")
+
 # JavaSparkContext handle
 sparkSession <- sparkR.session(enableHiveSupport = FALSE)
 sc <- callJStatic("org.apache.spark.sql.api.r.SQLUtils", "getJavaSparkContext", sparkSession)

R/pkg/inst/tests/testthat/test_binary_function.R

Lines changed: 3 additions & 0 deletions

@@ -17,6 +17,9 @@
 
 context("binary functions")
 
+# Ensure Spark is installed
+sparkCheckInstall(Sys.getenv("SPARK_HOME"), "local", "")
+
 # JavaSparkContext handle
 sparkSession <- sparkR.session(enableHiveSupport = FALSE)
 sc <- callJStatic("org.apache.spark.sql.api.r.SQLUtils", "getJavaSparkContext", sparkSession)

R/pkg/inst/tests/testthat/test_broadcast.R

Lines changed: 3 additions & 0 deletions

@@ -17,6 +17,9 @@
 
 context("broadcast variables")
 
+# Ensure Spark is installed
+sparkCheckInstall(Sys.getenv("SPARK_HOME"), "local", "")
+
 # JavaSparkContext handle
 sparkSession <- sparkR.session(enableHiveSupport = FALSE)
 sc <- callJStatic("org.apache.spark.sql.api.r.SQLUtils", "getJavaSparkContext", sparkSession)

R/pkg/inst/tests/testthat/test_context.R

Lines changed: 3 additions & 0 deletions

@@ -17,6 +17,9 @@
 
 context("test functions in sparkR.R")
 
+# Ensure Spark is installed
+sparkCheckInstall(Sys.getenv("SPARK_HOME"), "local", "")
+
 test_that("Check masked functions", {
   # Check that we are not masking any new function from base, stats, testthat unexpectedly
   # NOTE: We should avoid adding entries to *namesOfMaskedCompletely* as masked functions make it

R/pkg/inst/tests/testthat/test_includePackage.R

Lines changed: 3 additions & 0 deletions

@@ -17,6 +17,9 @@
 
 context("include R packages")
 
+# Ensure Spark is installed
+sparkCheckInstall(Sys.getenv("SPARK_HOME"), "local", "")
+
 # JavaSparkContext handle
 sparkSession <- sparkR.session(enableHiveSupport = FALSE)
 sc <- callJStatic("org.apache.spark.sql.api.r.SQLUtils", "getJavaSparkContext", sparkSession)

R/pkg/inst/tests/testthat/test_jvm_api.R

Lines changed: 3 additions & 0 deletions

@@ -17,6 +17,9 @@
 
 context("JVM API")
 
+# Ensure Spark is installed
+sparkCheckInstall(Sys.getenv("SPARK_HOME"), "local", "")
+
 sparkSession <- sparkR.session(enableHiveSupport = FALSE)
 
 test_that("Create and call methods on object", {

R/pkg/inst/tests/testthat/test_mllib_classification.R

Lines changed: 3 additions & 0 deletions

@@ -19,6 +19,9 @@ library(testthat)
 
 context("MLlib classification algorithms, except for tree-based algorithms")
 
+# Ensure Spark is installed
+sparkCheckInstall(Sys.getenv("SPARK_HOME"), "local", "")
+
 # Tests for MLlib classification algorithms in SparkR
 sparkSession <- sparkR.session(enableHiveSupport = FALSE)
 

R/pkg/inst/tests/testthat/test_mllib_clustering.R

Lines changed: 3 additions & 0 deletions

@@ -19,6 +19,9 @@ library(testthat)
 
 context("MLlib clustering algorithms")
 
+# Ensure Spark is installed
+sparkCheckInstall(Sys.getenv("SPARK_HOME"), "local", "")
+
 # Tests for MLlib clustering algorithms in SparkR
 sparkSession <- sparkR.session(enableHiveSupport = FALSE)
 

R/pkg/inst/tests/testthat/test_mllib_recommendation.R

Lines changed: 3 additions & 0 deletions

@@ -19,6 +19,9 @@ library(testthat)
 
 context("MLlib recommendation algorithms")
 
+# Ensure Spark is installed
+sparkCheckInstall(Sys.getenv("SPARK_HOME"), "local", "")
+
 # Tests for MLlib recommendation algorithms in SparkR
 sparkSession <- sparkR.session(enableHiveSupport = FALSE)
 
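For readers without the SparkR sources at hand, here is an illustrative sketch of what such an installation guard does, written against SparkR's exported install.spark() helper. This is a hypothetical stand-in, not the actual sparkCheckInstall() implementation, and ensureSparkInstalled is an invented name:

# Illustrative sketch only -- NOT the real sparkCheckInstall() body.
# Idea: for a local master, make sure a Spark distribution exists,
# downloading one via SparkR's exported install.spark() if needed.
library(SparkR)

ensureSparkInstalled <- function(sparkHome, master) {  # hypothetical helper
  if (nchar(sparkHome) > 0 && dir.exists(sparkHome)) {
    return(invisible(sparkHome))                       # already installed
  }
  if (grepl("^local", master)) {
    return(invisible(install.spark()))                 # download into the local cache
  }
  stop("SPARK_HOME is not set and the master is not local; cannot install Spark")
}

ensureSparkInstalled(Sys.getenv("SPARK_HOME"), "local")

With a guard like this in place, each test file can start its sparkR.session() even on a machine where SPARK_HOME was never configured.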
