@@ -19,7 +19,6 @@ package org.apache.spark.sql.execution.datasources.parquet
 
 import java.util.Locale
 
-import org.apache.parquet.hadoop.ParquetOutputFormat
 import org.apache.parquet.hadoop.metadata.CompressionCodecName
 
 import org.apache.spark.sql.catalyst.util.CaseInsensitiveMap
@@ -28,7 +27,7 @@ import org.apache.spark.sql.internal.SQLConf
 /**
  * Options for the Parquet data source.
  */
-class ParquetOptions(
+private[parquet] class ParquetOptions(
     @transient private val parameters: CaseInsensitiveMap[String],
     @transient private val sqlConf: SQLConf)
   extends Serializable {
@@ -43,15 +42,8 @@ class ParquetOptions(
    * Acceptable values are defined in [[shortParquetCompressionCodecNames]].
    */
   val compressionCodecClassName: String = {
-    // `compression`, `parquet.compression`(i.e., ParquetOutputFormat.COMPRESSION), and
-    // `spark.sql.parquet.compression.codec`
-    // are in order of precedence from highest to lowest.
-    val parquetCompressionConf = parameters.get(ParquetOutputFormat.COMPRESSION)
-    val codecName = parameters
-      .get("compression")
-      .orElse(parquetCompressionConf)
-      .getOrElse(sqlConf.parquetCompressionCodec)
-      .toLowerCase(Locale.ROOT)
+    val codecName = parameters.getOrElse("compression",
+      sqlConf.parquetCompressionCodec).toLowerCase(Locale.ROOT)
     if (!shortParquetCompressionCodecNames.contains(codecName)) {
       val availableCodecs =
         shortParquetCompressionCodecNames.keys.map(_.toLowerCase(Locale.ROOT))
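
As a usage-level sketch of what this diff changes (not part of the patch itself): after the change, ParquetOptions resolves the codec from the per-write `compression` option first and falls back to the session-level `spark.sql.parquet.compression.codec` setting; the Hadoop-style `parquet.compression` key is no longer consulted. The object name, application name, and output paths below are made up for illustration only.

import org.apache.spark.sql.SparkSession

object ParquetCompressionDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("parquet-compression-demo")
      // Session-level default; used only when no per-write option is given.
      .config("spark.sql.parquet.compression.codec", "gzip")
      .getOrCreate()
    import spark.implicits._

    val df = Seq((1, "a"), (2, "b")).toDF("id", "value")

    // No `compression` option, so the SQLConf value (gzip) applies.
    df.write.mode("overwrite").parquet("/tmp/parquet-demo-gzip")

    // The `compression` data source option overrides the SQLConf value.
    df.write.mode("overwrite")
      .option("compression", "snappy")
      .parquet("/tmp/parquet-demo-snappy")

    spark.stop()
  }
}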