@@ -185,10 +185,16 @@ class HadoopTableReader(
       val partitionName = partition.getName
       val partNum = Utilities.getPartitionDesc(partition).getPartSpec.size();
       var pathPatternStr = getPathPatternByPath(partNum, partPath, partitionName)
+      // scalastyle:off println
+      println(s"=== $partPath, $partitionName, $partNum, $pathPatternStr===")
       if (!pathPatternSet.contains(pathPatternStr)) {
         pathPatternSet += pathPatternStr
         updateExistPathSetByPathPattern(pathPatternStr)
       }
+      // scalastyle:off println
+      println(s"=1== ${existPathSet.size}, ${partPath.toString}, " +
+        s"${existPathSet.contains(partPath.toString)}===")
+
       existPathSet.contains(partPath.toString)
     }
   }
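The debug output above traces a pattern-level cache: getPathPatternByPath collapses a concrete partition path into a glob-like pattern, each new pattern is expanded once via updateExistPathSetByPathPattern to seed existPathSet, and every later partition reduces to a cheap set-membership check. A minimal standalone sketch of that caching shape, with listPaths as a hypothetical stand-in for the pattern expansion:

import scala.collection.mutable

val seenPatterns = mutable.Set[String]()   // patterns already expanded
val existingPaths = mutable.Set[String]()  // concrete paths found on storage

// listPaths is a hypothetical stand-in for expanding one glob pattern
// against the filesystem (done once per pattern, not once per partition).
def partitionExists(pattern: String, path: String)
                   (listPaths: String => Seq[String]): Boolean = {
  if (!seenPatterns.contains(pattern)) {
    seenPatterns += pattern               // remember the pattern
    existingPaths ++= listPaths(pattern)  // expand it once, cache the hits
  }
  existingPaths.contains(path)            // cheap membership test afterwards
}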
@@ -198,6 +204,11 @@ class HadoopTableReader(
       val partDesc = Utilities.getPartitionDesc(partition)
       val partPath = partition.getDataLocation
       val inputPathStr = applyFilterIfNeeded(partPath, filterOpt)
+
+      // scalastyle:off println
+      println(s"=1== ${inputPathStr}, ${partPath.toString}, " +
+        s"${if (filterOpt.isDefined) filterOpt.get.toString else ""}===")
+
       val ifc = partDesc.getInputFileFormatClass
         .asInstanceOf[java.lang.Class[InputFormat[Writable, Writable]]]
       // Get partition field info
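One caveat with these pragmas: Scalastyle only re-enables a suppressed check at a matching // scalastyle:on println, so each off above leaves the rule disabled for the remainder of the file. A sketch of the paired form, and of the alternative of routing the trace through logDebug (assuming the class mixes in org.apache.spark.internal.Logging, as HadoopTableReader does upstream; the message text is illustrative):

// scalastyle:off println
println(s"=== $partPath, $partitionName, $partNum, $pathPatternStr===")
// scalastyle:on println

// logDebug takes a by-name String, so the interpolation is only evaluated
// when DEBUG logging is enabled; no pragma needed.
logDebug(s"pattern=$pathPatternStr partition=$partPath " +
  s"known=${existPathSet.contains(partPath.toString)}")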