Skip to content

Commit bca3be8

Browse files
committed
Match the name of the OrcRelation companion object to OrcFileFormat
1 parent 3eca283 commit bca3be8

File tree

2 files changed

+8
-8
lines changed

2 files changed

+8
-8
lines changed

sql/hive/src/main/scala/org/apache/spark/sql/hive/orc/OrcFileFormat.scala

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -70,7 +70,7 @@ class OrcFileFormat extends FileFormat with DataSourceRegister with Serializable
7070

7171
val configuration = job.getConfiguration
7272

73-
configuration.set(OrcRelation.ORC_COMPRESSION, orcOptions.compressionCodec)
73+
configuration.set(OrcFileFormat.ORC_COMPRESSION, orcOptions.compressionCodec)
7474
configuration match {
7575
case conf: JobConf =>
7676
conf.setOutputFormat(classOf[OrcOutputFormat])
@@ -110,7 +110,7 @@ class OrcFileFormat extends FileFormat with DataSourceRegister with Serializable
110110
if (sparkSession.sessionState.conf.orcFilterPushDown) {
111111
// Sets pushed predicates
112112
OrcFilters.createFilter(requiredSchema, filters.toArray).foreach { f =>
113-
hadoopConf.set(OrcRelation.SARG_PUSHDOWN, f.toKryo)
113+
hadoopConf.set(OrcFileFormat.SARG_PUSHDOWN, f.toKryo)
114114
hadoopConf.setBoolean(ConfVars.HIVEOPTINDEXFILTER.varname, true)
115115
}
116116
}
@@ -129,7 +129,7 @@ class OrcFileFormat extends FileFormat with DataSourceRegister with Serializable
129129
Iterator.empty
130130
} else {
131131
val physicalSchema = maybePhysicalSchema.get
132-
OrcRelation.setRequiredColumns(conf, physicalSchema, requiredSchema)
132+
OrcFileFormat.setRequiredColumns(conf, physicalSchema, requiredSchema)
133133

134134
val orcRecordReader = {
135135
val job = Job.getInstance(conf)
@@ -151,7 +151,7 @@ class OrcFileFormat extends FileFormat with DataSourceRegister with Serializable
151151
Option(TaskContext.get()).foreach(_.addTaskCompletionListener(_ => recordsIterator.close()))
152152

153153
// Unwraps `OrcStruct`s to `UnsafeRow`s
154-
OrcRelation.unwrapOrcStructs(
154+
OrcFileFormat.unwrapOrcStructs(
155155
conf,
156156
requiredSchema,
157157
Some(orcRecordReader.getObjectInspector.asInstanceOf[StructObjectInspector]),
@@ -218,8 +218,8 @@ private[orc] class OrcOutputWriter(
218218

219219
override val path: String = {
220220
val compressionExtension: String = {
221-
val name = context.getConfiguration.get(OrcRelation.ORC_COMPRESSION)
222-
OrcRelation.extensionsForCompressionCodecNames.getOrElse(name, "")
221+
val name = context.getConfiguration.get(OrcFileFormat.ORC_COMPRESSION)
222+
OrcFileFormat.extensionsForCompressionCodecNames.getOrElse(name, "")
223223
}
224224
// It has the `.orc` extension at the end because (de)compression tools
225225
// such as gunzip would not be able to decompress this as the compression
@@ -257,7 +257,7 @@ private[orc] class OrcOutputWriter(
257257
}
258258
}
259259

260-
private[orc] object OrcRelation extends HiveInspectors {
260+
private[orc] object OrcFileFormat extends HiveInspectors {
261261
// The references of Hive's classes will be minimized.
262262
val ORC_COMPRESSION = "orc.compress"
263263

sql/hive/src/main/scala/org/apache/spark/sql/hive/orc/OrcOptions.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,7 @@ private[orc] class OrcOptions(@transient private val parameters: Map[String, Str
3333
// `orc.compress` is a ORC configuration. So, here we respect this as an option but
3434
// `compression` has higher precedence than `orc.compress`. It means if both are set,
3535
// we will use `compression`.
36-
val orcCompressionConf = parameters.get(OrcRelation.ORC_COMPRESSION)
36+
val orcCompressionConf = parameters.get(OrcFileFormat.ORC_COMPRESSION)
3737
val codecName = parameters
3838
.get("compression")
3939
.orElse(orcCompressionConf)

0 commit comments

Comments (0)