
Commit 037755c

Some changes after working with Andrew Or
1 parent f7d124f commit 037755c

10 files changed: +41 −29 lines


core/src/main/scala/org/apache/spark/TaskEndReason.scala

Lines changed: 18 additions & 9 deletions
@@ -21,47 +21,56 @@ import org.apache.spark.executor.TaskMetrics
 import org.apache.spark.storage.BlockManagerId
 
 /**
+ * <span class="developer badge">Developer API</span>
  * Various possible reasons why a task ended. The low-level TaskScheduler is supposed to retry
  * tasks several times for "ephemeral" failures, and only report back failures that require some
  * old stages to be resubmitted, such as shuffle map fetch failures.
  */
-private[spark] sealed trait TaskEndReason
 
-private[spark] case object Success extends TaskEndReason
+sealed trait TaskEndReason
 
-private[spark]
+/** <span class="developer badge">Developer API</span> */
+case object Success extends TaskEndReason
+
+/** <span class="developer badge">Developer API</span> */
 case object Resubmitted extends TaskEndReason // Task was finished earlier but we've now lost it
 
-private[spark] case class FetchFailed(
+/** <span class="developer badge">Developer API</span> */
+case class FetchFailed(
     bmAddress: BlockManagerId,
     shuffleId: Int,
     mapId: Int,
     reduceId: Int)
   extends TaskEndReason
 
-private[spark] case class ExceptionFailure(
+/** <span class="developer badge">Developer API</span> */
+case class ExceptionFailure(
     className: String,
     description: String,
    stackTrace: Array[StackTraceElement],
    metrics: Option[TaskMetrics])
   extends TaskEndReason
 
 /**
+ * <span class="developer badge">Developer API</span>
  * The task finished successfully, but the result was lost from the executor's block manager before
  * it was fetched.
  */
-private[spark] case object TaskResultLost extends TaskEndReason
+case object TaskResultLost extends TaskEndReason
 
-private[spark] case object TaskKilled extends TaskEndReason
+/** <span class="developer badge">Developer API</span> */
+case object TaskKilled extends TaskEndReason
 
 /**
+ * <span class="developer badge">Developer API</span>
  * The task failed because the executor that it was running on was lost. This may happen because
  * the task crashed the JVM.
  */
-private[spark] case object ExecutorLostFailure extends TaskEndReason
+case object ExecutorLostFailure extends TaskEndReason
 
 /**
+ * <span class="developer badge">Developer API</span>
 * We don't know why the task ended -- for example, because of a ClassNotFound exception when
 * deserializing the task result.
 */
-private[spark] case object UnknownReason extends TaskEndReason
+case object UnknownReason extends TaskEndReason
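
With the TaskEndReason hierarchy now public as a Developer API, listener or logging code outside org.apache.spark can match on it directly. A minimal sketch, assuming only the names visible in this diff:

import org.apache.spark.{ExceptionFailure, ExecutorLostFailure, FetchFailed, Resubmitted, Success,
  TaskEndReason, TaskKilled, TaskResultLost, UnknownReason}

// Exhaustive match over the sealed TaskEndReason trait, e.g. for logging task events.
object TaskEndReasonExample {
  def describe(reason: TaskEndReason): String = reason match {
    case Success             => "task succeeded"
    case Resubmitted         => "task finished earlier but was lost and resubmitted"
    case FetchFailed(bmAddress, shuffleId, mapId, reduceId) =>
      s"fetch failed for shuffle $shuffleId (map $mapId, reduce $reduceId) at $bmAddress"
    case ExceptionFailure(className, description, _, _) =>
      s"exception $className: $description"
    case TaskResultLost      => "task result lost before it was fetched"
    case TaskKilled          => "task was killed"
    case ExecutorLostFailure => "executor was lost"
    case UnknownReason       => "unknown reason"
  }
}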

core/src/main/scala/org/apache/spark/executor/TaskMetrics.scala

Lines changed: 2 additions & 2 deletions
@@ -81,8 +81,8 @@ class TaskMetrics extends Serializable {
   var updatedBlocks: Option[Seq[(BlockId, BlockStatus)]] = None
 }
 
-object TaskMetrics {
-  private[spark] def empty(): TaskMetrics = new TaskMetrics
+private[spark] object TaskMetrics {
+  def empty(): TaskMetrics = new TaskMetrics
 }
 

core/src/main/scala/org/apache/spark/scheduler/JobResult.scala

Lines changed: 4 additions & 3 deletions
@@ -18,11 +18,12 @@
 package org.apache.spark.scheduler
 
 /**
+ * <span class="developer badge">Developer API</span>
  * A result of a job in the DAGScheduler.
  */
-private[spark] sealed trait JobResult
+sealed trait JobResult
 
-private[spark] case object JobSucceeded extends JobResult
+case object JobSucceeded extends JobResult
 
 // A failed stage ID of -1 means there is not a particular stage that caused the failure
-private[spark] case class JobFailed(exception: Exception, failedStageId: Int) extends JobResult
+case class JobFailed(exception: Exception, failedStageId: Int) extends JobResult
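
With JobResult, JobSucceeded and JobFailed public, callers can inspect job outcomes. A minimal sketch using only the members shown above:

import org.apache.spark.scheduler.{JobFailed, JobResult, JobSucceeded}

// Summarize a job outcome from the now-public JobResult hierarchy.
object JobResultExample {
  def describe(result: JobResult): String = result match {
    case JobSucceeded => "job succeeded"
    case JobFailed(exception, failedStageId) =>
      s"job failed (stage $failedStageId): ${exception.getMessage}"
  }
}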

core/src/main/scala/org/apache/spark/scheduler/StageInfo.scala

Lines changed: 1 addition & 1 deletion
@@ -20,9 +20,9 @@ package org.apache.spark.scheduler
 import org.apache.spark.storage.RDDInfo
 
 /**
+ * <span class="developer badge">Developer API</span>
  * Stores information about a stage to pass from the scheduler to SparkListeners.
  */
-private[spark]
 class StageInfo(val stageId: Int, val name: String, val numTasks: Int, val rddInfo: RDDInfo) {
   /** When this stage was submitted from the DAGScheduler to a TaskScheduler. */
   var submissionTime: Option[Long] = None
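
With StageInfo public, code that receives it (for example through a SparkListener) can read its fields. A minimal sketch using only members visible in this diff:

import org.apache.spark.scheduler.StageInfo

// Format a one-line summary of a stage.
object StageInfoExample {
  def describe(info: StageInfo): String = {
    val submitted = info.submissionTime.map(_.toString).getOrElse("not yet submitted")
    s"stage ${info.stageId} '${info.name}' (${info.numTasks} tasks, submitted: $submitted)"
  }
}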

core/src/main/scala/org/apache/spark/scheduler/TaskInfo.scala

Lines changed: 6 additions & 6 deletions
@@ -18,9 +18,9 @@
 package org.apache.spark.scheduler
 
 /**
+ * <span class="developer badge">Developer API</span>
  * Information about a running task attempt inside a TaskSet.
  */
-private[spark]
 class TaskInfo(
     val taskId: Long,
     val index: Int,
@@ -46,15 +46,15 @@ class TaskInfo(
 
   var serializedSize: Int = 0
 
-  def markGettingResult(time: Long = System.currentTimeMillis) {
+  private[spark] def markGettingResult(time: Long = System.currentTimeMillis) {
     gettingResultTime = time
   }
 
-  def markSuccessful(time: Long = System.currentTimeMillis) {
+  private[spark] def markSuccessful(time: Long = System.currentTimeMillis) {
     finishTime = time
   }
 
-  def markFailed(time: Long = System.currentTimeMillis) {
+  private[spark] def markFailed(time: Long = System.currentTimeMillis) {
     finishTime = time
     failed = true
   }
@@ -83,11 +83,11 @@ class TaskInfo(
 
   def duration: Long = {
     if (!finished) {
-      throw new UnsupportedOperationException("duration() called on unfinished tasks")
+      throw new UnsupportedOperationException("duration() called on unfinished task")
     } else {
       finishTime - launchTime
     }
   }
 
-  def timeRunning(currentTime: Long): Long = currentTime - launchTime
+  private[spark] def timeRunning(currentTime: Long): Long = currentTime - launchTime
 }
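
Because markGettingResult, markSuccessful, markFailed and timeRunning become private[spark], external code can only read TaskInfo state. A minimal sketch, assuming finished, duration and launchTime remain public as shown above:

import org.apache.spark.scheduler.TaskInfo

// Elapsed time for a task: total duration if finished, otherwise time since launch
// (recomputed here because timeRunning is no longer visible outside Spark).
object TaskInfoExample {
  def elapsedMs(info: TaskInfo, now: Long = System.currentTimeMillis): Long =
    if (info.finished) info.duration else now - info.launchTime
}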

core/src/main/scala/org/apache/spark/scheduler/TaskLocality.scala

Lines changed: 2 additions & 1 deletion
@@ -17,7 +17,8 @@
 
 package org.apache.spark.scheduler
 
-private[spark] object TaskLocality extends Enumeration {
+/** <span class="developer badge">Developer API</span> */
+object TaskLocality extends Enumeration {
   // Process local is expected to be used ONLY within TaskSetManager for now.
   val PROCESS_LOCAL, NODE_LOCAL, RACK_LOCAL, ANY = Value
 
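
TaskLocality is now a public Enumeration; its values are ordered by declaration, so locality levels can be compared directly. A minimal sketch:

import org.apache.spark.scheduler.TaskLocality

// PROCESS_LOCAL is the most local level, ANY the least.
object TaskLocalityExample {
  def main(args: Array[String]): Unit = {
    assert(TaskLocality.PROCESS_LOCAL < TaskLocality.ANY)
    println(TaskLocality.values.toList.mkString(", "))
  }
}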

core/src/main/scala/org/apache/spark/storage/StorageUtils.scala

Lines changed: 2 additions & 1 deletion
@@ -47,7 +47,8 @@ class StorageStatus(
   }
 }
 
-private[spark]
+
+/** <span class="developer badge">Developer API</span> */
 class RDDInfo(val id: Int, val name: String, val numPartitions: Int, val storageLevel: StorageLevel)
   extends Ordered[RDDInfo] {
 

core/src/main/scala/org/apache/spark/util/collection/AppendOnlyMap.scala

Lines changed: 3 additions & 3 deletions
@@ -20,6 +20,7 @@ package org.apache.spark.util.collection
 import java.util.{Arrays, Comparator}
 
 /**
+ * <span class="developer badge">Developer API</span>
  * A simple open hash table optimized for the append-only use case, where keys
  * are never removed, but the value for each key may be changed.
  *
@@ -29,9 +30,8 @@ import java.util.{Arrays, Comparator}
  *
  * TODO: Cache the hash values of each key? java.util.HashMap does that.
  */
-private[spark]
-class AppendOnlyMap[K, V](initialCapacity: Int = 64) extends Iterable[(K,
-V)] with Serializable {
+class AppendOnlyMap[K, V](initialCapacity: Int = 64)
+  extends Iterable[(K, V)] with Serializable {
   require(initialCapacity <= (1 << 29), "Can't make capacity bigger than 2^29 elements")
   require(initialCapacity >= 1, "Invalid initial capacity")
 
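
AppendOnlyMap is now public. A minimal word-count sketch; the changeValue(key, (hadValue, oldValue) => newValue) signature is assumed from this version of the class:

import org.apache.spark.util.collection.AppendOnlyMap

object AppendOnlyMapExample {
  def wordCounts(words: Seq[String]): AppendOnlyMap[String, Int] = {
    val counts = new AppendOnlyMap[String, Int]()
    // changeValue inserts when the key is absent and updates in place otherwise.
    words.foreach { w =>
      counts.changeValue(w, (hadValue, old) => if (hadValue) old + 1 else 1)
    }
    counts
  }

  def main(args: Array[String]): Unit = {
    wordCounts(Seq("a", "b", "a")).iterator.foreach { case (k, v) => println(s"$k -> $v") }
  }
}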

core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala

Lines changed: 2 additions & 2 deletions
@@ -31,6 +31,7 @@ import org.apache.spark.serializer.Serializer
 import org.apache.spark.storage.{BlockId, BlockManager}
 
 /**
+ * <span class="developer badge">Developer API</span>
  * An append-only map that spills sorted content to disk when there is insufficient space for it
 * to grow.
 *
@@ -55,8 +56,7 @@ import org.apache.spark.storage.{BlockId, BlockManager}
 * `spark.shuffle.safetyFraction` specifies an additional margin of safety as a fraction of
 * this threshold, in case map size estimation is not sufficiently accurate.
 */
-
-private[spark] class ExternalAppendOnlyMap[K, V, C](
+class ExternalAppendOnlyMap[K, V, C](
     createCombiner: V => C,
     mergeValue: (C, V) => C,
     mergeCombiners: (C, C) => C,
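
ExternalAppendOnlyMap is now public; its three constructor functions mirror combineByKey. A minimal sketch, assuming it runs inside a Spark application, since the default serializer and block manager arguments are taken from SparkEnv:

import org.apache.spark.util.collection.ExternalAppendOnlyMap

object ExternalAppendOnlyMapExample {
  // Group integer values per key, spilling to disk if the in-memory map grows too large.
  def groupValues(pairs: Iterator[(String, Int)]): Iterator[(String, List[Int])] = {
    val map = new ExternalAppendOnlyMap[String, Int, List[Int]](
      createCombiner = v => List(v),           // start a buffer for a new key
      mergeValue = (buf, v) => v :: buf,       // add a value to an existing buffer
      mergeCombiners = (b1, b2) => b1 ::: b2)  // merge buffers from spilled runs
    pairs.foreach { case (k, v) => map.insert(k, v) }
    map.iterator
  }
}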

core/src/main/scala/org/apache/spark/util/collection/OpenHashMap.scala

Lines changed: 1 addition & 1 deletion
@@ -20,13 +20,13 @@ package org.apache.spark.util.collection
 import scala.reflect.ClassTag
 
 /**
+ * <span class="developer badge">Developer API</span>
  * A fast hash map implementation for nullable keys. This hash map supports insertions and updates,
  * but not deletions. This map is about 5X faster than java.util.HashMap, while using much less
  * space overhead.
 *
 * Under the hood, it uses our OpenHashSet implementation.
 */
-private[spark]
 class OpenHashMap[K >: Null : ClassTag, @specialized(Long, Int, Double) V: ClassTag](
     initialCapacity: Int)
   extends Iterable[(K, V)]
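
OpenHashMap is now public. A minimal sketch; the changeValue(key, default, mergeValue) signature is assumed from this version of the class:

import org.apache.spark.util.collection.OpenHashMap

object OpenHashMapExample {
  def main(args: Array[String]): Unit = {
    val counts = new OpenHashMap[String, Int](64)
    counts("spark") = 1                    // insert via update
    counts.changeValue("spark", 1, _ + 1)  // bump an existing value
    counts.changeValue("scala", 1, _ + 1)  // insert via the default value
    counts.iterator.foreach { case (k, v) => println(s"$k -> $v") }
  }
}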
