@@ -70,10 +70,17 @@ class OracleIntegrationSuite extends DockerJDBCIntegrationSuite with SharedSQLCo
7070 """ .stripMargin.replaceAll(" \n " , " " )).executeUpdate()
7171 conn.commit()
7272
73- conn.prepareStatement(" CREATE TABLE ts_with_timezone (id NUMBER(10), t TIMESTAMP WITH TIME ZONE)" )
74- .executeUpdate()
75- conn.prepareStatement(" INSERT INTO ts_with_timezone VALUES (1, to_timestamp_tz('1999-12-01 11:00:00 UTC','YYYY-MM-DD HH:MI:SS TZR'))" )
76- .executeUpdate()
73+ conn.prepareStatement(
74+ " CREATE TABLE ts_with_timezone (id NUMBER(10), t TIMESTAMP WITH TIME ZONE)" ).executeUpdate()
75+ conn.prepareStatement(
76+ " INSERT INTO ts_with_timezone VALUES " +
77+ " (1, to_timestamp_tz('1999-12-01 11:00:00 UTC','YYYY-MM-DD HH:MI:SS TZR'))" ).executeUpdate()
78+ conn.commit()
79+
80+ conn.prepareStatement(
81+ " CREATE TABLE custom_column_types (id NUMBER, n1 number(1), n2 number(1))" ).executeUpdate()
82+ conn.prepareStatement(
83+ " INSERT INTO custom_column_types values(12312321321321312312312312123, 1, 0)" ).executeUpdate()
7784 conn.commit()
7885
7986 sql(
@@ -198,4 +205,37 @@ class OracleIntegrationSuite extends DockerJDBCIntegrationSuite with SharedSQLCo
     val types = rows(0).toSeq.map(x => x.getClass.toString)
     assert(types(1).equals("class java.sql.Timestamp"))
   }
+
+  test("SPARK-20427/SPARK-20921: read table use custom schema") {
+
+    // default will throw IllegalArgumentException (wrapped in SparkException)
+    val e = intercept[org.apache.spark.SparkException] {
+      spark.read.jdbc(jdbcUrl, "custom_column_types", new Properties()).collect()
+    }
+    assert(e.getMessage.contains(
+      "requirement failed: Decimal precision 39 exceeds max precision 38"))
+
+    // custom schema can read data
+    val schema = StructType(Seq(
+      StructField("ID", DecimalType(DecimalType.MAX_PRECISION, 0), true,
+        new MetadataBuilder().putString("name", "ID").build()),
+      StructField("N1", IntegerType, true, new MetadataBuilder().putString("name", "N1").build()),
+      StructField("N2", BooleanType, true, new MetadataBuilder().putString("name", "N2").build())))
+
+    val dfRead = spark.read.schema(schema).jdbc(jdbcUrl, "custom_column_types", new Properties())
+    val rows = dfRead.collect()
+
+    // verify the data type inserted
+    val types = rows(0).toSeq.map(x => x.getClass.toString)
+    assert(types(0).equals("class java.math.BigDecimal"))
+    assert(types(1).equals("class java.lang.Integer"))
+    assert(types(2).equals("class java.lang.Boolean"))
+
+    // verify the value inserted
+    val values = rows(0)
+    assert(values.getDecimal(0).equals(new java.math.BigDecimal("12312321321321312312312312123")))
+    assert(values.getInt(1).equals(1))
+    assert(values.getBoolean(2).equals(false))
+  }
+
 }
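For reviewers who want to try the new behavior outside the Docker suite, here is a minimal standalone sketch of the read pattern the test exercises: a user-specified schema replaces the JDBC-inferred one, so an unbounded Oracle NUMBER (which otherwise trips the "Decimal precision 39 exceeds max precision 38" check) can be pinned to DECIMAL(38, 0), and NUMBER(1) columns to INT or BOOLEAN. The JDBC URL, credentials, and the local `master` setting are placeholders, not values from this suite.

```scala
import java.util.Properties

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.types._

object CustomSchemaReadSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("oracle-custom-schema-read")
      .master("local[*]")
      .getOrCreate()

    // Placeholder URL: point this at any reachable Oracle instance.
    val jdbcUrl = "jdbc:oracle:thin:system/oracle@//localhost:1521/xe"

    // Pin each Oracle column to a type Spark can hold instead of relying on
    // JDBC metadata inference:
    //   NUMBER    -> DECIMAL(38, 0)  (DecimalType.MAX_PRECISION)
    //   NUMBER(1) -> INT or BOOLEAN
    val schema = StructType(Seq(
      StructField("ID", DecimalType(DecimalType.MAX_PRECISION, 0)),
      StructField("N1", IntegerType),
      StructField("N2", BooleanType)))

    // With the user-specified schema the 29-digit value reads back as a
    // java.math.BigDecimal; without it the collect() fails as in the test.
    val df = spark.read.schema(schema).jdbc(jdbcUrl, "custom_column_types", new Properties())
    df.show(truncate = false)

    spark.stop()
  }
}
```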