File tree Expand file tree Collapse file tree 2 files changed +8 −8 lines changed
core/src/main/scala/org/apache/spark Expand file tree Collapse file tree 2 files changed +8 −8 lines changed Original file line number Diff line number Diff line change @@ -321,6 +321,13 @@ package object config {
321321 .intConf
322322 .createWithDefault(3)
323323
324+ private[spark] val BUFFER_WRITE_CHUNK_SIZE =
325+   ConfigBuilder("spark.buffer.write.chunkSize")
326+     .internal()
327+     .doc("The block size limit when use ChunkedByteBuffer to writeFully bytes.")
328+     .bytesConf(ByteUnit.BYTE)
329+     .createWithDefault(64 * 1024 * 1024)
330+
324331 private[spark] val REDUCER_MAX_REQ_SIZE_SHUFFLE_TO_MEM =
325332   ConfigBuilder("spark.reducer.maxReqSizeShuffleToMem")
326333     .internal()
@@ -336,11 +343,4 @@ package object config {
336343       "spark.")
337344     .booleanConf
338345     .createWithDefault(false)
339-
340- private[spark] val STORAGE_NIO_BUFFER_LIMIT =
341-   ConfigBuilder("spark.storage.nioBufferLimit")
342-     .internal()
343-     .doc("The block size limit when use ChunkedByteBuffer to writeFully bytes.")
344-     .bytesConf(ByteUnit.BYTE)
345-     .createWithDefault(64 * 1024 * 1024)
346346}
Original file line number Diff line number Diff line change @@ -42,7 +42,7 @@ private[spark] class ChunkedByteBuffer(var chunks: Array[ByteBuffer]) {
4242   require(chunks.forall(_.position() == 0), "chunks' positions must be 0")
4343
4444 // Chunk size in bytes
45-   private val NIO_BUFFER_LIMIT = SparkEnv.get.conf.get(config.STORAGE_NIO_BUFFER_LIMIT)
45+   private val NIO_BUFFER_LIMIT = SparkEnv.get.conf.get(config.BUFFER_WRITE_CHUNK_SIZE)
4646
4747   private[this] var disposed: Boolean = false
4848
You can’t perform that action at this time.
0 commit comments