@@ -54,6 +54,9 @@ class JDBCWriteSuite extends SharedSQLContext with BeforeAndAfter {
5454 conn1.prepareStatement(
5555 "create table test.people1 (name TEXT(32) NOT NULL, `the id` INTEGER NOT NULL)")
5656 .executeUpdate()
57+ conn1.prepareStatement(
58+ "create table test.orders (`order` TEXT(32) NOT NULL, `order id` INTEGER NOT NULL)")
59+ .executeUpdate()
5760 conn1.commit()
5861
5962 sql(
@@ -69,6 +72,13 @@ class JDBCWriteSuite extends SharedSQLContext with BeforeAndAfter {
6972 |USING org.apache.spark.sql.jdbc
7073 |OPTIONS (url '$url1', dbtable 'TEST.PEOPLE1', user 'testUser', password 'testPass')
7174 """.stripMargin.replaceAll("\n", " "))
75+
76+ sql(
77+ s"""
78+ |CREATE TEMPORARY TABLE ORDERS
79+ |USING org.apache.spark.sql.jdbc
80+ |OPTIONS (url '$url1', dbtable 'TEST.ORDERS', user 'testUser', password 'testPass')
81+ """.stripMargin.replaceAll("\n", " "))
7282 }
7383
7484 after {
@@ -157,5 +167,8 @@ class JDBCWriteSuite extends SharedSQLContext with BeforeAndAfter {
157167 val df = sqlContext.createDataFrame(sparkContext.parallelize(arr2x2), schema2)
158168 df.write.insertInto("PEOPLE1")
159169 assert(2 === sqlContext.read.jdbc(url1, "TEST.PEOPLE1", properties).count)
170+
171+ df.write.insertInto("ORDERS")
172+ assert(2 === sqlContext.read.jdbc(url1, "TEST.ORDERS", properties).count)
160173 }
161174}