Skip to content

Commit d78b7d5

Browse files
committed
modify hacky code
1 parent a8dbcca commit d78b7d5

File tree

2 files changed: +18 additions, −23 deletions

sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala

Lines changed: 10 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -1836,18 +1836,17 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
18361836
test("insert data to a data source table which has a not existed location should succeed") {
18371837
withTable("t") {
18381838
withTempDir { dir =>
1839-
val path = dir.toURI.toString.stripSuffix("/")
18401839
spark.sql(
18411840
s"""
18421841
|CREATE TABLE t(a string, b int)
18431842
|USING parquet
1844-
|OPTIONS(path "$path")
1843+
|OPTIONS(path "$dir")
18451844
""".stripMargin)
18461845
val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
1847-
assert(table.location == path)
1846+
assert(table.location == dir.getAbsolutePath)
18481847

18491848
dir.delete
1850-
val tableLocFile = new File(table.location.stripPrefix("file:"))
1849+
val tableLocFile = new File(table.location)
18511850
assert(!tableLocFile.exists)
18521851
spark.sql("INSERT INTO TABLE t SELECT 'c', 1")
18531852
assert(tableLocFile.exists)
@@ -1878,16 +1877,15 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
18781877
test("insert into a data source table with no existed partition location should succeed") {
18791878
withTable("t") {
18801879
withTempDir { dir =>
1881-
val path = dir.toURI.toString.stripSuffix("/")
18821880
spark.sql(
18831881
s"""
18841882
|CREATE TABLE t(a int, b int, c int, d int)
18851883
|USING parquet
18861884
|PARTITIONED BY(a, b)
1887-
|LOCATION "$path"
1885+
|LOCATION "$dir"
18881886
""".stripMargin)
18891887
val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
1890-
assert(table.location == path)
1888+
assert(table.location == dir.getAbsolutePath)
18911889

18921890
spark.sql("INSERT INTO TABLE t PARTITION(a=1, b=2) SELECT 3, 4")
18931891
checkAnswer(spark.table("t"), Row(3, 4, 1, 2) :: Nil)
@@ -1906,15 +1904,14 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
19061904
test("read data from a data source table which has a not existed location should succeed") {
19071905
withTable("t") {
19081906
withTempDir { dir =>
1909-
val path = dir.toURI.toString.stripSuffix("/")
19101907
spark.sql(
19111908
s"""
19121909
|CREATE TABLE t(a string, b int)
19131910
|USING parquet
1914-
|OPTIONS(path "$path")
1911+
|OPTIONS(path "$dir")
19151912
""".stripMargin)
19161913
val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
1917-
assert(table.location == path)
1914+
assert(table.location == dir.getAbsolutePath)
19181915

19191916
dir.delete()
19201917
checkAnswer(spark.table("t"), Nil)
@@ -1939,7 +1936,7 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
19391936
|CREATE TABLE t(a int, b int, c int, d int)
19401937
|USING parquet
19411938
|PARTITIONED BY(a, b)
1942-
|LOCATION "${dir.toURI}"
1939+
|LOCATION "$dir"
19431940
""".stripMargin)
19441941
spark.sql("INSERT INTO TABLE t PARTITION(a=1, b=2) SELECT 3, 4")
19451942
checkAnswer(spark.table("t"), Row(3, 4, 1, 2) :: Nil)
@@ -1969,9 +1966,8 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
19691966
|LOCATION '$dir'
19701967
|AS SELECT 3 as a, 4 as b, 1 as c, 2 as d
19711968
""".stripMargin)
1972-
19731969
val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
1974-
assert(table.location.stripSuffix("/") == dir.getAbsolutePath.stripSuffix("/"))
1970+
assert(table.location == dir.getAbsolutePath)
19751971

19761972
checkAnswer(spark.table("t"), Row(3, 4, 1, 2))
19771973
}
@@ -1989,9 +1985,8 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
19891985
|LOCATION '$dir'
19901986
|AS SELECT 3 as a, 4 as b, 1 as c, 2 as d
19911987
""".stripMargin)
1992-
19931988
val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t1"))
1994-
assert(table.location.stripSuffix("/") == dir.getAbsolutePath.stripSuffix("/"))
1989+
assert(table.location == dir.getAbsolutePath)
19951990

19961991
val partDir = new File(dir, "a=3")
19971992
assert(partDir.exists())

sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1606,7 +1606,7 @@ class HiveDDLSuite
16061606
""".stripMargin)
16071607

16081608
val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
1609-
assert(table.location.stripSuffix("/") == dir.getAbsolutePath.stripSuffix("/"))
1609+
assert(table.location == dir.getAbsolutePath)
16101610

16111611
checkAnswer(spark.table("t"), Row(3, 4, 1, 2))
16121612
}
@@ -1626,7 +1626,7 @@ class HiveDDLSuite
16261626
""".stripMargin)
16271627

16281628
val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t1"))
1629-
assert(table.location.stripSuffix("/") == dir.getAbsolutePath.stripSuffix("/"))
1629+
assert(table.location == dir.getAbsolutePath)
16301630

16311631
val partDir = new File(dir, "a=3")
16321632
assert(partDir.exists())
@@ -1651,10 +1651,10 @@ class HiveDDLSuite
16511651
|LOCATION '$dir'
16521652
|AS SELECT 3 as a, 4 as b, 1 as c, 2 as d
16531653
""".stripMargin)
1654-
1654+
val dirPath = new Path(dir.getAbsolutePath)
1655+
val fs = dirPath.getFileSystem(spark.sessionState.newHadoopConf())
16551656
val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
1656-
val expectedPath = s"file:${dir.getAbsolutePath.stripSuffix("/")}"
1657-
assert(table.location.stripSuffix("/") == expectedPath)
1657+
assert(new Path(table.location) == fs.makeQualified(dirPath))
16581658

16591659
checkAnswer(spark.table("t"), Row(3, 4, 1, 2))
16601660
}
@@ -1672,10 +1672,10 @@ class HiveDDLSuite
16721672
|LOCATION '$dir'
16731673
|AS SELECT 3 as a, 4 as b, 1 as c, 2 as d
16741674
""".stripMargin)
1675-
1675+
val dirPath = new Path(dir.getAbsolutePath)
1676+
val fs = dirPath.getFileSystem(spark.sessionState.newHadoopConf())
16761677
val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t1"))
1677-
val expectedPath = s"file:${dir.getAbsolutePath.stripSuffix("/")}"
1678-
assert(table.location.stripSuffix("/") == expectedPath)
1678+
assert(new Path(table.location) == fs.makeQualified(dirPath))
16791679

16801680
val partDir = new File(dir, "a=3")
16811681
assert(partDir.exists())

Comments (0)