Commit ed94623

fix minor bugs
1 parent 30a01c9 commit ed94623

2 files changed: +39 -39 lines changed

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JDBCRDD.scala

Lines changed: 30 additions & 29 deletions
@@ -167,6 +167,35 @@ private[sql] object JDBCRDD extends Logging {
     new StructType(columns map { name => fieldMap(name) })
   }
 
+  /**
+   * Converts value to SQL expression.
+   */
+  private def compileValue(value: Any): Any = value match {
+    case stringValue: String => s"'${escapeSql(stringValue)}'"
+    case timestampValue: Timestamp => "'" + timestampValue + "'"
+    case dateValue: Date => "'" + dateValue + "'"
+    case _ => value
+  }
+
+  private def escapeSql(value: String): String =
+    if (value == null) null else StringUtils.replace(value, "'", "''")
+
+  /**
+   * Turns a single Filter into a String representing a SQL expression.
+   * Returns null for an unhandled filter.
+   */
+  private def compileFilter(f: Filter): String = f match {
+    case EqualTo(attr, value) => s"$attr = ${compileValue(value)}"
+    case Not(EqualTo(attr, value)) => s"$attr != ${compileValue(value)}"
+    case LessThan(attr, value) => s"$attr < ${compileValue(value)}"
+    case GreaterThan(attr, value) => s"$attr > ${compileValue(value)}"
+    case LessThanOrEqual(attr, value) => s"$attr <= ${compileValue(value)}"
+    case GreaterThanOrEqual(attr, value) => s"$attr >= ${compileValue(value)}"
+    case IsNull(attr) => s"$attr IS NULL"
+    case IsNotNull(attr) => s"$attr IS NOT NULL"
+    case _ => null
+  }
+
   /**
    * Given a driver string and an url, return a function that loads the
    * specified driver string then returns a connection to the JDBC url.
@@ -262,40 +291,12 @@ private[sql] class JDBCRDD(
     if (sb.length == 0) "1" else sb.substring(1)
   }
 
-  /**
-   * Converts value to SQL expression.
-   */
-  private def compileValue(value: Any): Any = value match {
-    case stringValue: String => s"'${escapeSql(stringValue)}'"
-    case timestampValue: Timestamp => "'" + timestampValue + "'"
-    case dateValue: Date => "'" + dateValue + "'"
-    case _ => value
-  }
-
-  private def escapeSql(value: String): String =
-    if (value == null) null else StringUtils.replace(value, "'", "''")
-
-  /**
-   * Turns a single Filter into a String representing a SQL expression.
-   * Returns null for an unhandled filter.
-   */
-  private def compileFilter(f: Filter): String = f match {
-    case EqualTo(attr, value) => s"$attr = ${compileValue(value)}"
-    case Not(EqualTo(attr, value)) => s"$attr != ${compileValue(value)}"
-    case LessThan(attr, value) => s"$attr < ${compileValue(value)}"
-    case GreaterThan(attr, value) => s"$attr > ${compileValue(value)}"
-    case LessThanOrEqual(attr, value) => s"$attr <= ${compileValue(value)}"
-    case GreaterThanOrEqual(attr, value) => s"$attr >= ${compileValue(value)}"
-    case IsNull(attr) => s"$attr IS NULL"
-    case IsNotNull(attr) => s"$attr IS NOT NULL"
-    case _ => null
-  }
 
   /**
    * `filters`, but as a WHERE clause suitable for injection into a SQL query.
    */
   private val filterWhereClause: String = {
-    val filterStrings = filters map compileFilter filter (_ != null)
+    val filterStrings = filters map JDBCRDD.compileFilter filter (_ != null)
     if (filterStrings.size > 0) {
       val sb = new StringBuilder("WHERE ")
       filterStrings.foreach(x => sb.append(x).append(" AND "))
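
For readers skimming the patch: the helpers being moved translate Spark SQL Filter predicates into SQL WHERE-clause fragments for pushdown to the JDBC source, and relocating them from the JDBCRDD class to its companion object lets callers (including the test suite below) use them without constructing an RDD. The following is a minimal, self-contained sketch of the same translation technique; the Filter case classes and the FilterCompiler object are hypothetical stand-ins for org.apache.spark.sql.sources and JDBCRDD, and only a subset of the filter cases is shown.

import java.sql.{Date, Timestamp}

// Hypothetical stand-ins for the org.apache.spark.sql.sources filter
// classes, included so this sketch compiles on its own.
sealed trait Filter
case class EqualTo(attr: String, value: Any) extends Filter
case class LessThan(attr: String, value: Any) extends Filter
case class IsNull(attr: String) extends Filter

object FilterCompiler {
  // Escape embedded single quotes by doubling them, as escapeSql does
  // via StringUtils.replace in the patch.
  private def escapeSql(value: String): String =
    if (value == null) null else value.replace("'", "''")

  // Quote strings (after escaping), timestamps, and dates; pass other
  // values (e.g. numbers) through unchanged.
  private def compileValue(value: Any): Any = value match {
    case s: String    => s"'${escapeSql(s)}'"
    case t: Timestamp => "'" + t + "'"
    case d: Date      => "'" + d + "'"
    case _            => value
  }

  // Translate a filter into a SQL expression string; return null for
  // anything that cannot be pushed down, mirroring the patched method.
  def compileFilter(f: Filter): String = f match {
    case EqualTo(attr, value)  => s"$attr = ${compileValue(value)}"
    case LessThan(attr, value) => s"$attr < ${compileValue(value)}"
    case IsNull(attr)          => s"$attr IS NULL"
    case _                     => null
  }
}

object FilterCompilerDemo extends App {
  println(FilterCompiler.compileFilter(EqualTo("name", "O'Brien"))) // name = 'O''Brien'
  println(FilterCompiler.compileFilter(LessThan("col0", 5)))        // col0 < 5
  println(FilterCompiler.compileFilter(IsNull("col1")))             // col1 IS NULL
}

Note how the escaping step doubles embedded single quotes, which is what escapeSql guards against when a string value like O'Brien is interpolated into the generated SQL.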

sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala

Lines changed: 9 additions & 10 deletions
@@ -18,14 +18,13 @@
 package org.apache.spark.sql.jdbc
 
 import java.math.BigDecimal
-import java.sql.DriverManager
+import java.sql.{Date, DriverManager, Timestamp}
 import java.util.{Calendar, GregorianCalendar, Properties}
 
 import org.h2.jdbc.JdbcSQLException
 import org.scalatest.BeforeAndAfter
 import org.scalatest.PrivateMethodTester
 
-import org.apache.spark.Partition
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.sql.execution.datasources.jdbc.JDBCRDD
 import org.apache.spark.sql.test.SharedSQLContext
@@ -436,19 +435,19 @@ class JDBCSuite extends SparkFunSuite
   }
 
   test("compile filters") {
-    val jdbcRdd = JDBCRDD.scanTable(
-      null, null, "", "", null, "", Array.empty[String], Array.empty[Filter], Array.empty[Partition])
-    val compileFilter = PrivateMethod[Unit]('compileFilter)
-    def doCompileFilter(f: Filter) = jdbcRdd invokePrivate compileFilter(f)
-
+    val compileFilter = PrivateMethod[String]('compileFilter)
+    def doCompileFilter(f: Filter) = JDBCRDD invokePrivate compileFilter(f)
     assert(doCompileFilter(EqualTo("col0", 3)) === "col0 = 3")
-    assert(doCompileFilter(Not(EqualTo("col1", "abc"))) === "col1 != abc")
+    assert(doCompileFilter(Not(EqualTo("col1", "abc"))) === "col1 != 'abc'")
     assert(doCompileFilter(LessThan("col0", 5)) === "col0 < 5")
+    assert(doCompileFilter(LessThan("col3",
+      Timestamp.valueOf("1995-11-21 00:00:00.0"))) === "col3 < '1995-11-21 00:00:00.0'")
+    assert(doCompileFilter(LessThan("col4", Date.valueOf("1983-08-04"))) === "col4 < '1983-08-04'")
     assert(doCompileFilter(LessThanOrEqual("col0", 5)) === "col0 <= 5")
     assert(doCompileFilter(GreaterThan("col0", 3)) === "col0 > 3")
     assert(doCompileFilter(GreaterThanOrEqual("col0", 3)) === "col0 >= 3")
-    assert(doCompileFilter(IsNull("col1")) === "col0 IS NULL")
-    assert(doCompileFilter(IsNotNull("col1")) === "col0 IS NOT NULL")
+    assert(doCompileFilter(IsNull("col1")) === "col1 IS NULL")
+    assert(doCompileFilter(IsNotNull("col1")) === "col1 IS NOT NULL")
   }
 
   test("Dialect unregister") {

0 commit comments