
Commit 03bcb02

Merge branch 'SPARK-2670' of github.com:sarutak/spark into SPARK-2670

2 parents: 5d05855 + 4fca130

File tree: 2 files changed (+157 −15 lines)

core/src/main/scala/org/apache/spark/storage/BlockFetcherIterator.scala

Lines changed: 15 additions & 15 deletions

@@ -199,21 +199,21 @@ object BlockFetcherIterator {
       // Get the local blocks while remote blocks are being fetched. Note that it's okay to do
       // these all at once because they will just memory-map some files, so they won't consume
       // any memory that might exceed our maxBytesInFlight
-      for (id <- localBlocksToFetch) {
-        try{
-          getLocalFromDisk(id, serializer) match {
-            case Some(iter) => {
-              // Pass 0 as size since it's not in flight
-              results.put(new FetchResult(id, 0, () => iter))
-              logDebug("Got local block " + id)
-            }
-            case None => {
-              throw new BlockException(id, "Could not get block " + id + " from local machine")
-            }
-          }
-        } catch {
-          case e: Exception => {
-            logError(s"Error occurred while fetch local block $id", e)
+      var fetchIndex = 0
+      try {
+        for (id <- localBlocksToFetch) {
+
+          // getLocalFromDisk never return None but throws BlockException
+          val iter = getLocalFromDisk(id, serializer).get
+          // Pass 0 as size since it's not in flight
+          results.put(new FetchResult(id, 0, () => iter))
+          fetchIndex += 1
+          logDebug("Got local block " + id)
+        }
+      } catch {
+        case e: Exception => {
+          logError(s"Error occurred while fetching local blocks", e)
+          for (id <- localBlocksToFetch.drop(fetchIndex)) {
             results.put(new FetchResult(id, -1, null))
           }
         }
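
The net effect of this hunk is a fail-fast loop: local blocks are fetched one at a time, a counter tracks how many succeeded, and on the first exception every block that was not yet reached is recorded as a failed FetchResult (size -1), so the consuming iterator still receives one result per requested block instead of waiting forever. The following is a minimal, self-contained sketch of that bookkeeping pattern in isolation; the names Result and fetchAll are hypothetical and this is not Spark code.

import scala.collection.mutable.ArrayBuffer

// Hypothetical stand-in for FetchResult: size == -1 marks a failed fetch.
case class Result(id: String, size: Long)

object FailFastLocalFetch {
  // Fetch each id in order; after the first failure, mark every remaining id as failed
  // so that callers always get exactly one Result per requested id.
  def fetchAll(ids: Seq[String], fetch: String => Long): Seq[Result] = {
    val results = ArrayBuffer.empty[Result]
    var fetchIndex = 0
    try {
      for (id <- ids) {
        results += Result(id, fetch(id))   // fetch may throw
        fetchIndex += 1
      }
    } catch {
      case e: Exception =>
        // Everything from the failing id onward is reported as failed (size -1),
        // mirroring results.put(new FetchResult(id, -1, null)) above.
        for (id <- ids.drop(fetchIndex)) {
          results += Result(id, -1L)
        }
    }
    results.toSeq
  }

  def main(args: Array[String]): Unit = {
    val out = fetchAll(Seq("a", "b", "c", "d"),
      id => if (id == "c") throw new Exception("boom") else 1L)
    // Prints: Result(a,1), Result(b,1), Result(c,-1), Result(d,-1)
    println(out.mkString(", "))
  }
}
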
core/src/test/scala/org/apache/spark/storage/BlockFetcherIteratorSuite.scala (new file)

Lines changed: 142 additions & 0 deletions

@@ -0,0 +1,142 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.storage

import org.scalatest.{FunSuite, Matchers}
import org.scalatest.PrivateMethodTester._

import org.mockito.Mockito._
import org.mockito.Matchers.{any, eq => meq}
import org.mockito.stubbing.Answer
import org.mockito.invocation.InvocationOnMock

import org.apache.spark._
import org.apache.spark.storage.BlockFetcherIterator._
import org.apache.spark.network.{ConnectionManager, ConnectionManagerId,
  Message}

class BlockFetcherIteratorSuite extends FunSuite with Matchers {

  test("block fetch from local fails using BasicBlockFetcherIterator") {
    val blockManager = mock(classOf[BlockManager])
    val connManager = mock(classOf[ConnectionManager])
    doReturn(connManager).when(blockManager).connectionManager
    doReturn(BlockManagerId("test-client", "test-client", 1, 0)).when(blockManager).blockManagerId

    doReturn((48 * 1024 * 1024).asInstanceOf[Long]).when(blockManager).maxBytesInFlight

    val blIds = Array[BlockId](
      ShuffleBlockId(0, 0, 0),
      ShuffleBlockId(0, 1, 0),
      ShuffleBlockId(0, 2, 0),
      ShuffleBlockId(0, 3, 0),
      ShuffleBlockId(0, 4, 0))

    val optItr = mock(classOf[Option[Iterator[Any]]])
    val answer = new Answer[Option[Iterator[Any]]] {
      override def answer(invocation: InvocationOnMock) = Option[Iterator[Any]] {
        throw new Exception
      }
    }

    // 3rd block is going to fail
    doReturn(optItr).when(blockManager).getLocalFromDisk(meq(blIds(0)), any())
    doReturn(optItr).when(blockManager).getLocalFromDisk(meq(blIds(1)), any())
    doAnswer(answer).when(blockManager).getLocalFromDisk(meq(blIds(2)), any())
    doReturn(optItr).when(blockManager).getLocalFromDisk(meq(blIds(3)), any())
    doReturn(optItr).when(blockManager).getLocalFromDisk(meq(blIds(4)), any())

    val bmId = BlockManagerId("test-client", "test-client", 1, 0)
    val blocksByAddress = Seq[(BlockManagerId, Seq[(BlockId, Long)])](
      (bmId, blIds.map(blId => (blId, 1.asInstanceOf[Long])).toSeq)
    )

    val iterator = new BasicBlockFetcherIterator(blockManager,
      blocksByAddress, null)

    iterator.initialize()

    // 3rd getLocalFromDisk invocation should be failed
    verify(blockManager, times(3)).getLocalFromDisk(any(), any())

    (iterator.hasNext) should be(true)
    // the 2nd element of the tuple returned by iterator.next should be defined when fetching successfully
    (iterator.next._2.isDefined) should be(true)
    (iterator.hasNext) should be(true)
    (iterator.next._2.isDefined) should be(true)
    (iterator.hasNext) should be(true)
    // 3rd fetch should be failed
    (iterator.next._2.isDefined) should be(false)
    (iterator.hasNext) should be(true)
    // And then, all of local fetches should be failed
    (iterator.next._2.isDefined) should be(false)
    (iterator.hasNext) should be(true)
    (iterator.next._2.isDefined) should be(false)
  }

  test("block fetch from local succeed using BasicBlockFetcherIterator") {
    val blockManager = mock(classOf[BlockManager])
    val connManager = mock(classOf[ConnectionManager])
    doReturn(connManager).when(blockManager).connectionManager
    doReturn(BlockManagerId("test-client", "test-client", 1, 0)).when(blockManager).blockManagerId

    doReturn((48 * 1024 * 1024).asInstanceOf[Long]).when(blockManager).maxBytesInFlight

    val blIds = Array[BlockId](
      ShuffleBlockId(0, 0, 0),
      ShuffleBlockId(0, 1, 0),
      ShuffleBlockId(0, 2, 0),
      ShuffleBlockId(0, 3, 0),
      ShuffleBlockId(0, 4, 0))

    val optItr = mock(classOf[Option[Iterator[Any]]])

    // All blocks should be fetched successfully
    doReturn(optItr).when(blockManager).getLocalFromDisk(meq(blIds(0)), any())
    doReturn(optItr).when(blockManager).getLocalFromDisk(meq(blIds(1)), any())
    doReturn(optItr).when(blockManager).getLocalFromDisk(meq(blIds(2)), any())
    doReturn(optItr).when(blockManager).getLocalFromDisk(meq(blIds(3)), any())
    doReturn(optItr).when(blockManager).getLocalFromDisk(meq(blIds(4)), any())

    val bmId = BlockManagerId("test-client", "test-client", 1, 0)
    val blocksByAddress = Seq[(BlockManagerId, Seq[(BlockId, Long)])](
      (bmId, blIds.map(blId => (blId, 1.asInstanceOf[Long])).toSeq)
    )

    val iterator = new BasicBlockFetcherIterator(blockManager,
      blocksByAddress, null)

    iterator.initialize()

    // getLocalFromDisk should be invoked for all of 5 blocks
    verify(blockManager, times(5)).getLocalFromDisk(any(), any())

    (iterator.hasNext) should be(true)
    (iterator.next._2.isDefined) should be(true)
    (iterator.hasNext) should be(true)
    (iterator.next._2.isDefined) should be(true)
    (iterator.hasNext) should be(true)
    (iterator.next._2.isDefined) should be(true)
    (iterator.hasNext) should be(true)
    (iterator.next._2.isDefined) should be(true)
    (iterator.hasNext) should be(true)
    (iterator.next._2.isDefined) should be(true)
  }

}
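
For context on what the assertions above are checking: BasicBlockFetcherIterator yields one (BlockId, Option[Iterator[Any]]) pair per requested block, and a block whose fetch failed surfaces as None rather than as a missing element. A minimal, hypothetical consumer sketch (not part of this commit; the FetchResultHandling and handleBlocks names are invented) of how a caller typically branches on that Option:

import org.apache.spark.SparkException
import org.apache.spark.storage.BlockId

object FetchResultHandling {
  // Walk the fetcher's output and fail loudly on any block whose
  // Option[Iterator[Any]] came back as None (i.e. a failed fetch).
  def handleBlocks(fetched: Iterator[(BlockId, Option[Iterator[Any]])]): Unit = {
    for ((blockId, itrOpt) <- fetched) {
      itrOpt match {
        case Some(itr) =>
          itr.foreach(_ => ())   // process the block's records
        case None =>
          throw new SparkException(s"Could not fetch block $blockId")
      }
    }
  }
}
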
