Skip to content

Commit 134d6e9

Browse files
committed
fix.
1 parent 99f4e1b commit 134d6e9

File tree

5 files changed

+24
-6
lines changed

5 files changed

+24
-6
lines changed

external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -192,7 +192,7 @@ class OracleIntegrationSuite extends DockerJDBCIntegrationSuite with SharedSQLCo
192192
checkRow(sql("SELECT * FROM datetime1 where id = 1").head())
193193
}
194194

195-
test("SPARK-20557: column type TIMEZONE with TIME STAMP should be recognized") {
195+
test("SPARK-20557: column type TIMESTAMP with TIME ZONE should be recognized") {
196196
val dfRead = sqlContext.read.jdbc(jdbcUrl, "ts_with_timezone", new Properties)
197197
val rows = dfRead.collect()
198198
val types = rows(0).toSeq.map(x => x.getClass.toString)

external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala

Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -55,6 +55,13 @@ class PostgresIntegrationSuite extends DockerJDBCIntegrationSuite {
5555
+ "null, null, null, null, null, "
5656
+ "null, null, null, null, null, null, null)"
5757
).executeUpdate()
58+
59+
conn.prepareStatement("CREATE TABLE ts_with_timezone " +
60+
"(id integer, tstz TIMESTAMP WITH TIME ZONE, ttz TIME WITH TIME ZONE)")
61+
.executeUpdate()
62+
conn.prepareStatement("INSERT INTO ts_with_timezone VALUES " +
63+
"(1, TIMESTAMP WITH TIME ZONE '2016-08-12 10:22:31.949271-07', TIME WITH TIME ZONE '17:22:31.949271+00')")
64+
.executeUpdate()
5865
}
5966

6067
test("Type mapping for various types") {
@@ -126,4 +133,12 @@ class PostgresIntegrationSuite extends DockerJDBCIntegrationSuite {
126133
assert(schema(0).dataType == FloatType)
127134
assert(schema(1).dataType == ShortType)
128135
}
136+
137+
test("SPARK-20557: column type TIMESTAMP with TIME ZONE and TIME with TIME ZONE should be recognized") {
138+
val dfRead = sqlContext.read.jdbc(jdbcUrl, "ts_with_timezone", new Properties)
139+
val rows = dfRead.collect()
140+
val types = rows(0).toSeq.map(x => x.getClass.toString)
141+
assert(types(1).equals("class java.sql.Timestamp"))
142+
assert(types(2).equals("class java.sql.Timestamp"))
143+
}
129144
}

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -217,19 +217,23 @@ object JdbcUtils extends Logging {
217217
case java.sql.Types.OTHER => null
218218
case java.sql.Types.REAL => DoubleType
219219
case java.sql.Types.REF => StringType
220+
case java.sql.Types.REF_CURSOR => null
220221
case java.sql.Types.ROWID => LongType
221222
case java.sql.Types.SMALLINT => IntegerType
222223
case java.sql.Types.SQLXML => StringType
223224
case java.sql.Types.STRUCT => StringType
224225
case java.sql.Types.TIME => TimestampType
226+
case java.sql.Types.TIME_WITH_TIMEZONE
227+
=> TimestampType
225228
case java.sql.Types.TIMESTAMP => TimestampType
226229
case java.sql.Types.TIMESTAMP_WITH_TIMEZONE
227230
=> TimestampType
228231
case -101 => TimestampType // Value for Timestamp with Time Zone in Oracle
229232
case java.sql.Types.TINYINT => IntegerType
230233
case java.sql.Types.VARBINARY => BinaryType
231234
case java.sql.Types.VARCHAR => StringType
232-
case _ => null
235+
case _ =>
236+
throw new SQLException("Unrecognized SQL type " + sqlType)
233237
// scalastyle:on
234238
}
235239

sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,6 @@ import org.apache.spark.sql.types.StructType
3333
import org.apache.spark.storage.StorageLevel
3434

3535

36-
3736
/**
3837
* Internal implementation of the user-facing `Catalog`.
3938
*/

sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ import java.util.{Calendar, GregorianCalendar, Properties}
2424
import org.h2.jdbc.JdbcSQLException
2525
import org.scalatest.{BeforeAndAfter, PrivateMethodTester}
2626

27-
import org.apache.spark.SparkFunSuite
27+
import org.apache.spark.{SparkException, SparkFunSuite}
2828
import org.apache.spark.sql.{AnalysisException, DataFrame, Row}
2929
import org.apache.spark.sql.catalyst.util.CaseInsensitiveMap
3030
import org.apache.spark.sql.execution.DataSourceScanExec
@@ -929,10 +929,10 @@ class JDBCSuite extends SparkFunSuite
929929
}
930930

931931
test("unsupported types") {
932-
var e = intercept[SQLException] {
932+
var e = intercept[SparkException] {
933933
spark.read.jdbc(urlWithUserAndPass, "TEST.TIMEZONE", new Properties()).collect()
934934
}.getMessage
935-
assert(e.contains("Unsupported type TIMESTAMP_WITH_TIMEZONE"))
935+
assert(e.contains("java.lang.UnsupportedOperationException: unimplemented"))
936936
e = intercept[SQLException] {
937937
spark.read.jdbc(urlWithUserAndPass, "TEST.ARRAY", new Properties()).collect()
938938
}.getMessage

0 commit comments

Comments (0)