@@ -69,19 +69,22 @@ private[hive] class HiveMetastoreCatalog(hive: HiveContext) extends Catalog with
6969 val table = synchronized {
7070 client.getTable(in.database, in.name)
7171 }
72-    val schemaString = Option(table.getProperty("spark.sql.sources.schema.numOfParts")) match {
73-      case Some(numOfParts) =>
74-        val parts = (0 until numOfParts.toInt).map { index =>
75-          Option(table.getProperty(s"spark.sql.sources.schema.part.${index}"))
76-            .getOrElse("Could not read schema from the metastore because it is corrupted.")
72+    val userSpecifiedSchema =
73+      Option(table.getProperty("spark.sql.sources.schema.numParts")).flatMap { numParts =>
74+        val parts = (0 until numParts.toInt).map { index =>
75+          val part = table.getProperty(s"spark.sql.sources.schema.part.${index}")
76+          if (part == null) {
77+            throw new AnalysisException(
78+              "Could not read schema from the metastore because it is corrupted.")
79+          }
80+
81+          part
7782 }
78- // Stick all parts back to a single schema string in the JSON representation.
79-        Some(parts.mkString)
80-      case None => None // The schema was not defined.
83+ // Stick all parts back to a single schema string in the JSON representation
84+ // and convert it back to a StructType.
85+        Some(DataType.fromJson(parts.mkString).asInstanceOf[StructType])
8186 }
8287
83-    val userSpecifiedSchema =
84-      schemaString.flatMap(s => Some(DataType.fromJson(s).asInstanceOf[StructType]))
8588 // It does not appear that the ql client for the metastore has a way to enumerate all the
8689 // SerDe properties directly...
8790 val options = table.getTTable.getSd.getSerdeInfo.getParameters.toMap
@@ -129,10 +132,9 @@ private[hive] class HiveMetastoreCatalog(hive: HiveContext) extends Catalog with
129132 val schemaJsonString = userSpecifiedSchema.get.json
130133 // Split the JSON string.
131134 val parts = schemaJsonString.grouped(threshold).toSeq
132-        tbl.setProperty("spark.sql.sources.schema.numOfParts", parts.size.toString)
133-        parts.zipWithIndex.foreach {
134-          case (part, index) =>
135-            tbl.setProperty(s"spark.sql.sources.schema.part.${index}", part)
135+        tbl.setProperty("spark.sql.sources.schema.numParts", parts.size.toString)
136+        parts.zipWithIndex.foreach { case (part, index) =>
137+          tbl.setProperty(s"spark.sql.sources.schema.part.${index}", part)
136138 }
137139 }
138140 options.foreach { case (key, value) => tbl.setSerdeParam(key, value) }
0 commit comments