@@ -103,60 +103,6 @@ class InMemoryCatalogedDDLSuite extends DDLSuite with SharedSQLContext with Befo
103103 }
104104 }
105105
106- test("CTAS a managed table with the existing empty directory") {
107- val tableLoc = new File(spark.sessionState.catalog.defaultTablePath(TableIdentifier("tab1")))
108- try {
109- tableLoc.mkdir()
110- withTable("tab1") {
111- sql("CREATE TABLE tab1 USING PARQUET AS SELECT 1, 'a'")
112- checkAnswer(spark.table("tab1"), Row(1, "a"))
113- }
114- } finally {
115- waitForTasksToFinish()
116- Utils.deleteRecursively(tableLoc)
117- }
118- }
119-
120- test("create a managed table with the existing empty directory") {
121- val tableLoc = new File(spark.sessionState.catalog.defaultTablePath(TableIdentifier("tab1")))
122- try {
123- tableLoc.mkdir()
124- withTable("tab1") {
125- sql("CREATE TABLE tab1 (col1 int, col2 string) USING PARQUET")
126- sql("INSERT INTO tab1 VALUES (1, 'a')")
127- checkAnswer(spark.table("tab1"), Row(1, "a"))
128- }
129- } finally {
130- waitForTasksToFinish()
131- Utils.deleteRecursively(tableLoc)
132- }
133- }
134-
135- test("create a managed table with the existing non-empty directory") {
136- withTable("tab1") {
137- val tableLoc = new File(spark.sessionState.catalog.defaultTablePath(TableIdentifier("tab1")))
138- try {
139- // create an empty hidden file
140- tableLoc.mkdir()
141- val hiddenGarbageFile = new File(tableLoc.getCanonicalPath, ".garbage")
142- hiddenGarbageFile.createNewFile()
143- var ex = intercept[AnalysisException] {
144- sql("CREATE TABLE tab1 USING PARQUET AS SELECT 1, 'a'")
145- }.getMessage
146- assert(ex.contains("Can not create the managed table('`tab1`'). The associated location"))
147-
148- ex = intercept[AnalysisException] {
149- sql("CREATE TABLE tab1 (col1 int, col2 string) USING PARQUET")
150- }.getMessage
151- assert(ex.contains(
152- "Can not create the managed table('`default`.`tab1`'). The associated location"))
153- } finally {
154- waitForTasksToFinish()
155- Utils.deleteRecursively(tableLoc)
156- }
157- }
158- }
159-
160106 test("Create Hive Table As Select") {
161107 import testImplicits._
162108 withTable("t", "t1") {
@@ -234,6 +180,13 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils {
234180
235181 private val escapedIdentifier = "`(.+)`".r
236182
183+ private def dataSource: String = {
184+ if (isUsingHiveMetastore) {
185+ "HIVE"
186+ } else {
187+ "PARQUET"
188+ }
189+ }
237190 protected def normalizeCatalogTable(table: CatalogTable): CatalogTable = table
238191
239192 private def normalizeSerdeProp(props: Map[String, String]): Map[String, String] = {
@@ -419,6 +372,60 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils {
419372 }
420373 }
421374
375+ test("CTAS a managed table with the existing empty directory") {
376+ val tableLoc = new File(spark.sessionState.catalog.defaultTablePath(TableIdentifier("tab1")))
377+ try {
378+ tableLoc.mkdir()
379+ withTable("tab1") {
380+ sql(s"CREATE TABLE tab1 USING ${dataSource} AS SELECT 1, 'a'")
381+ checkAnswer(spark.table("tab1"), Row(1, "a"))
382+ }
383+ } finally {
384+ waitForTasksToFinish()
385+ Utils.deleteRecursively(tableLoc)
386+ }
387+ }
388+
389+ test("create a managed table with the existing empty directory") {
390+ val tableLoc = new File(spark.sessionState.catalog.defaultTablePath(TableIdentifier("tab1")))
391+ try {
392+ tableLoc.mkdir()
393+ withTable("tab1") {
394+ sql(s"CREATE TABLE tab1 (col1 int, col2 string) USING ${dataSource}")
395+ sql("INSERT INTO tab1 VALUES (1, 'a')")
396+ checkAnswer(spark.table("tab1"), Row(1, "a"))
397+ }
398+ } finally {
399+ waitForTasksToFinish()
400+ Utils.deleteRecursively(tableLoc)
401+ }
402+ }
403+
404+ test("create a managed table with the existing non-empty directory") {
405+ withTable("tab1") {
406+ val tableLoc = new File(spark.sessionState.catalog.defaultTablePath(TableIdentifier("tab1")))
407+ try {
408+ // create an empty hidden file
409+ tableLoc.mkdir()
410+ val hiddenGarbageFile = new File(tableLoc.getCanonicalPath, ".garbage")
411+ hiddenGarbageFile.createNewFile()
412+ var ex = intercept[AnalysisException] {
413+ sql(s"CREATE TABLE tab1 USING ${dataSource} AS SELECT 1, 'a'")
414+ }.getMessage
415+ assert(ex.contains("Can not create the managed table('`tab1`'). The associated location"))
416+
417+ ex = intercept[AnalysisException] {
418+ sql(s"CREATE TABLE tab1 (col1 int, col2 string) USING ${dataSource}")
419+ }.getMessage
420+ assert(ex.contains(
421+ "Can not create the managed table('`default`.`tab1`'). The associated location"))
422+ } finally {
423+ waitForTasksToFinish()
424+ Utils.deleteRecursively(tableLoc)
425+ }
426+ }
427+ }
428+
422429 private def checkSchemaInCreatedDataSourceTable(
423430 path: File,
424431 userSpecifiedSchema: Option[String],
0 commit comments