
Commit 9d3004d

Use BUFFER_WRITE_CHUNK_SIZE
1 parent 717f886 commit 9d3004d

File tree

2 files changed: +8 -8 lines changed


core/src/main/scala/org/apache/spark/internal/config/package.scala

Lines changed: 7 additions & 7 deletions

@@ -321,6 +321,13 @@ package object config {
       .intConf
       .createWithDefault(3)
 
+  private[spark] val BUFFER_WRITE_CHUNK_SIZE =
+    ConfigBuilder("spark.buffer.write.chunkSize")
+      .internal()
+      .doc("The block size limit when use ChunkedByteBuffer to writeFully bytes.")
+      .bytesConf(ByteUnit.BYTE)
+      .createWithDefault(64 * 1024 * 1024)
+
   private[spark] val REDUCER_MAX_REQ_SIZE_SHUFFLE_TO_MEM =
     ConfigBuilder("spark.reducer.maxReqSizeShuffleToMem")
       .internal()
@@ -336,11 +343,4 @@ package object config {
       "spark.")
     .booleanConf
     .createWithDefault(false)
-
-  private[spark] val STORAGE_NIO_BUFFER_LIMIT =
-    ConfigBuilder("spark.storage.nioBufferLimit")
-      .internal()
-      .doc("The block size limit when use ChunkedByteBuffer to writeFully bytes.")
-      .bytesConf(ByteUnit.BYTE)
-      .createWithDefault(64 * 1024 * 1024)
 }
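The renamed entry is read like any other internal config. Below is a minimal usage sketch, assuming Spark-internal code (the entry is marked .internal() and SparkConf.get(entry) is private[spark]); the 32m override is only an example, not part of this commit:

import org.apache.spark.SparkConf
import org.apache.spark.internal.config

// Illustrative only: the renamed entry and its 64 MiB default.
val conf = new SparkConf()
val defaultChunkSize: Long = conf.get(config.BUFFER_WRITE_CHUNK_SIZE)  // 64 * 1024 * 1024

// bytesConf(ByteUnit.BYTE) accepts size strings, so a hypothetical override could be:
conf.set("spark.buffer.write.chunkSize", "32m")
val overriddenChunkSize: Long = conf.get(config.BUFFER_WRITE_CHUNK_SIZE)  // 32 * 1024 * 1024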

core/src/main/scala/org/apache/spark/util/io/ChunkedByteBuffer.scala

Lines changed: 1 addition & 1 deletion

@@ -42,7 +42,7 @@ private[spark] class ChunkedByteBuffer(var chunks: Array[ByteBuffer]) {
   require(chunks.forall(_.position() == 0), "chunks' positions must be 0")
 
   // Chunk size in bytes
-  private val NIO_BUFFER_LIMIT = SparkEnv.get.conf.get(config.STORAGE_NIO_BUFFER_LIMIT)
+  private val NIO_BUFFER_LIMIT = SparkEnv.get.conf.get(config.BUFFER_WRITE_CHUNK_SIZE)
 
   private[this] var disposed: Boolean = false
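For context, NIO_BUFFER_LIMIT caps how many bytes go to the channel in a single write call. The following is a minimal sketch of that chunking idea against a plain WritableByteChannel; it illustrates the technique only and is not Spark's actual writeFully implementation:

import java.nio.ByteBuffer
import java.nio.channels.WritableByteChannel

// Sketch: write `buffer` in slices of at most `chunkSize` bytes by
// temporarily lowering the buffer's limit before each write.
def writeInChunks(buffer: ByteBuffer, channel: WritableByteChannel, chunkSize: Int): Unit = {
  val originalLimit = buffer.limit()
  while (buffer.hasRemaining) {
    val ioSize = math.min(buffer.remaining(), chunkSize)
    buffer.limit(buffer.position() + ioSize)
    channel.write(buffer)
    buffer.limit(originalLimit)  // restore so the next slice sees the remaining bytes
  }
}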
