@@ -20,7 +20,7 @@ package org.apache.spark.sql.hive
2020import java .io .IOException
2121import java .util .{List => JList }
2222
23- import com .google .common .cache .{CacheLoader , CacheBuilder }
23+ import com .google .common .cache .{LoadingCache , CacheLoader , CacheBuilder }
2424
2525import org .apache .hadoop .util .ReflectionUtils
2626import org .apache .hadoop .hive .metastore .TableType
@@ -54,46 +54,47 @@ private[hive] class HiveMetastoreCatalog(hive: HiveContext) extends Catalog with
5454
5555 // TODO: Use this everywhere instead of tuples or databaseName, tableName,.
5656 /** A fully qualified identifier for a table (i.e., database.tableName) */
57- case class TableIdent (database : String , name : String ) {
58- def toLowerCase = TableIdent (database.toLowerCase, name.toLowerCase)
57+ case class QualifiedTableName (database : String , name : String ) {
58+ def toLowerCase = QualifiedTableName (database.toLowerCase, name.toLowerCase)
5959 }
6060
6161 /** A cache of Spark SQL data source tables that have been accessed. */
62- protected [hive] val cachedDataSourceTables = CacheBuilder .newBuilder()
63- .maximumSize(1000 )
64- .build(
65- new CacheLoader [TableIdent , LogicalPlan ]() {
66- override def load (in : TableIdent ): LogicalPlan = {
67- logDebug(s " Creating new cached data source for $in" )
68- val table = client.getTable(in.database, in.name)
69- val schemaString = table.getProperty(" spark.sql.sources.schema" )
70- val userSpecifiedSchema =
71- if (schemaString == null ) {
72- None
73- } else {
74- Some (DataType .fromJson(schemaString).asInstanceOf [StructType ])
75- }
76- // It does not appear that the ql client for the metastore has a way to enumerate all the
77- // SerDe properties directly...
78- val options = table.getTTable.getSd.getSerdeInfo.getParameters.toMap
79-
80- val resolvedRelation =
81- ResolvedDataSource (
82- hive,
83- userSpecifiedSchema,
84- table.getProperty(" spark.sql.sources.provider" ),
85- options)
86-
87- LogicalRelation (resolvedRelation.relation)
88- }
89- })
62+ protected [hive] val cachedDataSourceTables : LoadingCache [QualifiedTableName , LogicalPlan ] = {
63+ val cacheLoader = new CacheLoader [QualifiedTableName , LogicalPlan ]() {
64+ override def load (in : QualifiedTableName ): LogicalPlan = {
65+ logDebug(s " Creating new cached data source for $in" )
66+ val table = client.getTable(in.database, in.name)
67+ val schemaString = table.getProperty(" spark.sql.sources.schema" )
68+ val userSpecifiedSchema =
69+ if (schemaString == null ) {
70+ None
71+ } else {
72+ Some (DataType .fromJson(schemaString).asInstanceOf [StructType ])
73+ }
74+ // It does not appear that the ql client for the metastore has a way to enumerate all the
75+ // SerDe properties directly...
76+ val options = table.getTTable.getSd.getSerdeInfo.getParameters.toMap
77+
78+ val resolvedRelation =
79+ ResolvedDataSource (
80+ hive,
81+ userSpecifiedSchema,
82+ table.getProperty(" spark.sql.sources.provider" ),
83+ options)
84+
85+ LogicalRelation (resolvedRelation.relation)
86+ }
87+ }
88+
89+ CacheBuilder .newBuilder().maximumSize(1000 ).build(cacheLoader)
90+ }
9091
9192 def refreshTable (databaseName : String , tableName : String ): Unit = {
92- cachedDataSourceTables.refresh(TableIdent (databaseName, tableName).toLowerCase)
93+ cachedDataSourceTables.refresh(QualifiedTableName (databaseName, tableName).toLowerCase)
9394 }
9495
9596 def invalidateTable (databaseName : String , tableName : String ): Unit = {
96- cachedDataSourceTables.invalidate(TableIdent (databaseName, tableName).toLowerCase)
97+ cachedDataSourceTables.invalidate(QualifiedTableName (databaseName, tableName).toLowerCase)
9798 }
9899
  // Identifiers are treated case-insensitively: qualified names are lower-cased
  // before cache lookups (see the `toLowerCase` calls above).
  val caseSensitive: Boolean = false
@@ -143,7 +144,7 @@ private[hive] class HiveMetastoreCatalog(hive: HiveContext) extends Catalog with
143144 val table = client.getTable(databaseName, tblName)
144145
145146 if (table.getProperty(" spark.sql.sources.provider" ) != null ) {
146- cachedDataSourceTables(TableIdent (databaseName, tblName).toLowerCase)
147+ cachedDataSourceTables(QualifiedTableName (databaseName, tblName).toLowerCase)
147148 } else if (table.isView) {
148149 // if the unresolved relation is from hive view
149150 // parse the text into logic node.
0 commit comments