Skip to content

Commit 35efe6a

Browse files
committed
Remove throw which caused the bug.
1 parent f7d2143 commit 35efe6a

File tree

3 files changed: +6 −33 lines changed

core/src/main/scala/org/apache/spark/storage/BlockFetchException.scala

Lines changed: 0 additions & 24 deletions
This file was deleted.

core/src/main/scala/org/apache/spark/storage/BlockManager.scala

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -565,8 +565,9 @@ private[spark] class BlockManager(
565565
// Give up trying anymore locations. Either we've tried all of the original locations,
566566
// or we've refreshed the list of locations from the master, and have still
567567
// hit failures after trying locations from the refreshed list.
568-
throw new BlockFetchException(s"Failed to fetch block after" +
569-
s" ${totalFailureCount} fetch failures. Most recent failure cause:", e)
568+
logError(s"Failed to fetch block after $totalFailureCount fetch failures." +
569+
s"Most recent failure cause:", e)
570+
return None
570571
}
571572

572573
logWarning(s"Failed to fetch remote block $blockId " +

core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala

Lines changed: 3 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -513,10 +513,8 @@ class BlockManagerSuite extends SparkFunSuite with Matchers with BeforeAndAfterE
513513
assert(store.getRemoteBytes("list1").isDefined, "list1Get expected to be fetched")
514514
store3.stop()
515515
store3 = null
516-
// exception throw because there is no locations
517-
intercept[BlockFetchException] {
518-
store.getRemoteBytes("list1")
519-
}
516+
// Should return None instead of throwing an exception:
517+
assert(store.getRemoteBytes("list1").isEmpty)
520518
}
521519

522520
test("SPARK-14252: getOrElseUpdate should still read from remote storage") {
@@ -1186,9 +1184,7 @@ class BlockManagerSuite extends SparkFunSuite with Matchers with BeforeAndAfterE
11861184
new MockBlockTransferService(conf.getInt("spark.block.failures.beforeLocationRefresh", 5))
11871185
store = makeBlockManager(8000, "executor1", transferService = Option(mockBlockTransferService))
11881186
store.putSingle("item", 999L, StorageLevel.MEMORY_ONLY, tellMaster = true)
1189-
intercept[BlockFetchException] {
1190-
store.getRemoteBytes("item")
1191-
}
1187+
assert(store.getRemoteBytes("item").isEmpty)
11921188
}
11931189

11941190
test("SPARK-13328: refresh block locations (fetch should succeed after location refresh)") {

Comments: 0 commit comments