Commit 3162215

fix test
1 parent 503c935 commit 3162215

File tree: 2 files changed (+4, -81 lines)

R/pkg/inst/tests/testthat/test_context.R

Lines changed: 0 additions & 13 deletions
@@ -54,15 +54,6 @@ test_that("Check masked functions", {
                sort(namesOfMaskedCompletely, na.last = TRUE))
 })
 
-test_that("repeatedly starting and stopping SparkR", {
-  for (i in 1:4) {
-    sc <- suppressWarnings(sparkR.init())
-    rdd <- parallelize(sc, 1:20, 2L)
-    expect_equal(count(rdd), 20)
-    suppressWarnings(sparkR.stop())
-  }
-})
-
 test_that("repeatedly starting and stopping SparkSession", {
   for (i in 1:4) {
     sparkR.session(enableHiveSupport = FALSE)

@@ -100,10 +91,6 @@ test_that("job group functions can be called", {
   setJobGroup("groupId", "job description", TRUE)
   cancelJobGroup("groupId")
   clearJobGroup()
-
-  suppressWarnings(setJobGroup(sc, "groupId", "job description", TRUE))
-  suppressWarnings(cancelJobGroup(sc, "groupId"))
-  suppressWarnings(clearJobGroup(sc))
   sparkR.session.stop()
 })
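Note: the deleted test exercised the deprecated SparkContext entry points (sparkR.init(), sparkR.stop()) and the sc-flavored job-group calls; the retained tests cover the same ground through the SparkSession API. A minimal sketch of that pattern, assuming a local SparkR install (master = "local[1]" is an illustrative choice, not from this commit):

library(SparkR)

# Repeatedly start and stop a session, as the retained test does.
for (i in 1:4) {
  sparkR.session(master = "local[1]", enableHiveSupport = FALSE)
  df <- createDataFrame(data.frame(x = 1:20))
  stopifnot(count(df) == 20)  # analogous to expect_equal(count(rdd), 20)
  sparkR.session.stop()
}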

R/pkg/inst/tests/testthat/test_sparkSQL.R

Lines changed: 4 additions & 68 deletions
@@ -88,15 +88,6 @@ mockLinesComplexType <-
 complexTypeJsonPath <- tempfile(pattern = "sparkr-test", fileext = ".tmp")
 writeLines(mockLinesComplexType, complexTypeJsonPath)
 
-test_that("calling sparkRSQL.init returns existing SQL context", {
-  sqlContext <- suppressWarnings(sparkRSQL.init(sc))
-  expect_equal(suppressWarnings(sparkRSQL.init(sc)), sqlContext)
-})
-
-test_that("calling sparkRSQL.init returns existing SparkSession", {
-  expect_equal(suppressWarnings(sparkRSQL.init(sc)), sparkSession)
-})
-
 test_that("calling sparkR.session returns existing SparkSession", {
   expect_equal(sparkR.session(), sparkSession)
 })
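Note: sparkRSQL.init() is the deprecated SQLContext entry point; its "returns the existing context" behavior is now covered by the idempotence of sparkR.session(), which the kept test asserts. A short sketch of that contract (variable names are illustrative):

# A second sparkR.session() call returns the existing session
# instead of creating a new one.
sparkSession <- sparkR.session(enableHiveSupport = FALSE)
sameSession <- sparkR.session()
# the kept test's assertion: expect_equal(sameSession, sparkSession)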
@@ -474,44 +465,26 @@ test_that("read/write json files", {
   jsonPath3 <- tempfile(pattern = "jsonPath3", fileext = ".json")
   write.json(df, jsonPath3)
 
-  # Test read.json()/jsonFile() works with multiple input paths
+  # Test read.json() works with multiple input paths
   jsonDF1 <- read.json(c(jsonPath2, jsonPath3))
   expect_is(jsonDF1, "SparkDataFrame")
   expect_equal(count(jsonDF1), 6)
-  # Suppress warnings because jsonFile is deprecated
-  jsonDF2 <- suppressWarnings(jsonFile(c(jsonPath2, jsonPath3)))
-  expect_is(jsonDF2, "SparkDataFrame")
-  expect_equal(count(jsonDF2), 6)
 
   unlink(jsonPath2)
   unlink(jsonPath3)
 })
 
-test_that("jsonRDD() on a RDD with json string", {
-  sqlContext <- suppressWarnings(sparkRSQL.init(sc))
-  rdd <- parallelize(sc, mockLines)
-  expect_equal(count(rdd), 3)
-  df <- suppressWarnings(jsonRDD(sqlContext, rdd))
-  expect_is(df, "SparkDataFrame")
-  expect_equal(count(df), 3)
-
-  rdd2 <- flatMap(rdd, function(x) c(x, x))
-  df <- suppressWarnings(jsonRDD(sqlContext, rdd2))
-  expect_is(df, "SparkDataFrame")
-  expect_equal(count(df), 6)
-})
-
 test_that("test tableNames and tables", {
   df <- read.json(jsonPath)
   createOrReplaceTempView(df, "table1")
   expect_equal(length(tableNames()), 1)
   tables <- tables()
   expect_equal(count(tables), 1)
 
-  suppressWarnings(registerTempTable(df, "table2"))
+  createOrReplaceTempView(df, "table2")
   tables <- tables()
   expect_equal(count(tables), 2)
-  suppressWarnings(dropTempTable("table1"))
+  dropTempView("table1")
   dropTempView("table2")
 
   tables <- tables()
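Note: this hunk migrates the temp-table API; registerTempTable()/dropTempTable() are deprecated in favor of createOrReplaceTempView()/dropTempView(). An illustrative before/after, assuming a running session and a SparkDataFrame df:

# deprecated:
#   registerTempTable(df, "people"); dropTempTable("people")
# current:
df <- createDataFrame(data.frame(x = 1:3))
createOrReplaceTempView(df, "people")
result <- sql("SELECT x FROM people WHERE x > 1")
dropTempView("people")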
@@ -1650,7 +1623,6 @@ test_that("union(), rbind(), except(), and intersect() on a DataFrame", {
   expect_is(unioned, "SparkDataFrame")
   expect_equal(count(unioned), 6)
   expect_equal(first(unioned)$name, "Michael")
-  expect_equal(count(arrange(suppressWarnings(unionAll(df, df2)), df$age)), 6)
 
   unioned2 <- arrange(rbind(unioned, df, df2), df$age)
   expect_is(unioned2, "SparkDataFrame")
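Note: the dropped assertion used the deprecated unionAll(); union() (and rbind() for stacking more than two frames) is the replacement the surrounding test already relies on. A sketch, reusing the test's df and df2:

# union() replaces unionAll(); rbind() handles multiple inputs.
unioned <- arrange(union(df, df2), df$age)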
@@ -1777,13 +1749,10 @@ test_that("read/write Parquet files", {
   parquetPath2 <- tempfile(pattern = "parquetPath2", fileext = ".parquet")
   write.parquet(df, parquetPath2)
   parquetPath3 <- tempfile(pattern = "parquetPath3", fileext = ".parquet")
-  suppressWarnings(saveAsParquetFile(df, parquetPath3))
+  write.parquet(df, parquetPath3)
   parquetDF <- read.parquet(c(parquetPath2, parquetPath3))
   expect_is(parquetDF, "SparkDataFrame")
   expect_equal(count(parquetDF), count(df) * 2)
-  parquetDF2 <- suppressWarnings(parquetFile(parquetPath2, parquetPath3))
-  expect_is(parquetDF2, "SparkDataFrame")
-  expect_equal(count(parquetDF2), count(df) * 2)
 
   # Test if varargs works with variables
   saveMode <- "overwrite"
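Note: saveAsParquetFile() and parquetFile() are the deprecated pair here; write.parquet() and read.parquet() replace them, and read.parquet() accepts a vector of paths as the test shows. Minimal sketch, with the path chosen purely for illustration:

path <- tempfile(pattern = "example", fileext = ".parquet")
write.parquet(df, path)       # replaces saveAsParquetFile(df, path)
df2 <- read.parquet(path)     # replaces parquetFile(path); also takes c(p1, p2)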
@@ -2400,39 +2369,6 @@ test_that("Window functions on a DataFrame", {
   expect_equal(result, expected)
 })
 
-test_that("createDataFrame sqlContext parameter backward compatibility", {
-  sqlContext <- suppressWarnings(sparkRSQL.init(sc))
-  a <- 1:3
-  b <- c("a", "b", "c")
-  ldf <- data.frame(a, b)
-  # Call function with namespace :: operator - SPARK-16538
-  df <- suppressWarnings(SparkR::createDataFrame(sqlContext, ldf))
-  expect_equal(columns(df), c("a", "b"))
-  expect_equal(dtypes(df), list(c("a", "int"), c("b", "string")))
-  expect_equal(count(df), 3)
-  ldf2 <- collect(df)
-  expect_equal(ldf$a, ldf2$a)
-
-  df2 <- suppressWarnings(createDataFrame(sqlContext, iris))
-  expect_equal(count(df2), 150)
-  expect_equal(ncol(df2), 5)
-
-  df3 <- suppressWarnings(read.df(sqlContext, jsonPath, "json"))
-  expect_is(df3, "SparkDataFrame")
-  expect_equal(count(df3), 3)
-
-  before <- suppressWarnings(createDataFrame(sqlContext, iris))
-  after <- suppressWarnings(createDataFrame(iris))
-  expect_equal(collect(before), collect(after))
-
-  # more tests for SPARK-16538
-  createOrReplaceTempView(df, "table")
-  SparkR::tables()
-  SparkR::sql("SELECT 1")
-  suppressWarnings(SparkR::sql(sqlContext, "SELECT * FROM table"))
-  suppressWarnings(SparkR::dropTempTable(sqlContext, "table"))
-})
-
 test_that("randomSplit", {
   num <- 4000
   df <- createDataFrame(data.frame(id = 1:num))
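Note: the removed test covered the deprecated sqlContext-first signatures (SPARK-16538). Under the SparkSession API the context argument is simply dropped; a hedged sketch of the equivalent calls (jsonPath is the fixture defined elsewhere in this test file):

df <- createDataFrame(iris)        # not createDataFrame(sqlContext, iris)
df2 <- read.df(jsonPath, "json")   # not read.df(sqlContext, jsonPath, "json")
createOrReplaceTempView(df2, "table")
sql("SELECT * FROM table")         # not sql(sqlContext, "SELECT * FROM table")
dropTempView("table")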
