Skip to content

Commit d50bdf7

Browse files
Changed the getJDBCType call to first try the overload that takes both the data type and metadata, falling back to the overload that takes only the data type.
1 parent a0cb024 commit d50bdf7

File tree

1 file changed

+23
-29
lines changed
  • sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc

1 file changed

+23
-29
lines changed

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala

Lines changed: 23 additions & 29 deletions
Original file line number | Diff line number | Diff line change
@@ -169,14 +169,10 @@ object JdbcUtils extends Logging {
169169
val dialect = JdbcDialects.get(url)
170170
df.schema.fields foreach { field => {
171171
val name = field.name
172-
/* Modified getJDBCType with added parameter metadata
173-
* To override field metadata in Scala
174-
* import org.apache.spark.sql.types.MetadataBuilder
175-
* val metadata = new MetadataBuilder().putLong("maxlength", 10).build()
176-
* df.withColumn("colName", col("colName").as("colName", metadata)
177-
*/
172+
// Added getJDBCType with added parameter metadata
178173
val typ: String = dialect.getJDBCType(field.dataType, field.metadata)
179-
.map(_.databaseTypeDefinition).getOrElse(
174+
.map(_.databaseTypeDefinition).orElse(dialect.getJDBCType(field.dataType)
175+
.map(_.databaseTypeDefinition)).getOrElse(
180176
field.dataType match {
181177
case IntegerType => "INTEGER"
182178
case LongType => "BIGINT"
@@ -208,29 +204,27 @@ object JdbcUtils extends Logging {
208204
properties: Properties = new Properties()) {
209205
val dialect = JdbcDialects.get(url)
210206
val nullTypes: Array[Int] = df.schema.fields.map { field =>
211-
/* Modified getJDBCType with added parameter metadata
212-
* To override field metadata in Scala
213-
* import org.apache.spark.sql.types.MetadataBuilder
214-
* val metadata = new MetadataBuilder().putLong("maxlength", 10).build()
215-
* df.withColumn("colName", col("colName").as("colName", metadata)
216-
*/
217-
dialect.getJDBCType(field.dataType, field.metadata).map(_.jdbcNullType).getOrElse(
218-
field.dataType match {
219-
case IntegerType => java.sql.Types.INTEGER
220-
case LongType => java.sql.Types.BIGINT
221-
case DoubleType => java.sql.Types.DOUBLE
222-
case FloatType => java.sql.Types.REAL
223-
case ShortType => java.sql.Types.INTEGER
224-
case ByteType => java.sql.Types.INTEGER
225-
case BooleanType => java.sql.Types.BIT
226-
case StringType => java.sql.Types.CLOB
227-
case BinaryType => java.sql.Types.BLOB
228-
case TimestampType => java.sql.Types.TIMESTAMP
229-
case DateType => java.sql.Types.DATE
230-
case t: DecimalType => java.sql.Types.DECIMAL
231-
case _ => throw new IllegalArgumentException(
207+
// Added getJDBCType with added parameter metadata
208+
dialect.getJDBCType(field.dataType, field.metadata)
209+
.map(_.jdbcNullType).orElse(dialect.getJDBCType(field.dataType)
210+
.map(_.jdbcNullType)).getOrElse(
211+
field.dataType match {
212+
case IntegerType => java.sql.Types.INTEGER
213+
case LongType => java.sql.Types.BIGINT
214+
case DoubleType => java.sql.Types.DOUBLE
215+
case FloatType => java.sql.Types.REAL
216+
case ShortType => java.sql.Types.INTEGER
217+
case ByteType => java.sql.Types.INTEGER
218+
case BooleanType => java.sql.Types.BIT
219+
case StringType => java.sql.Types.CLOB
220+
case BinaryType => java.sql.Types.BLOB
221+
case TimestampType => java.sql.Types.TIMESTAMP
222+
case DateType => java.sql.Types.DATE
223+
case t: DecimalType => java.sql.Types.DECIMAL
224+
case _ => throw new IllegalArgumentException(
232225
s"Can't translate null value for field $field")
233-
})
226+
}
227+
)
234228
}
235229

236230
val rddSchema = df.schema

0 commit comments

Comments (0)