Skip to content

Commit 1884123

Browse files
committed
[SPARK-2318] When exiting on a signal, print the signal name first.
1 parent 76bbd18 commit 1884123

File tree

4 files changed

+13
-8
lines changed

4 files changed

+13
-8
lines changed

core/src/main/scala/org/apache/spark/deploy/master/Master.scala

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -41,7 +41,7 @@ import org.apache.spark.deploy.master.ui.MasterWebUI
4141
import org.apache.spark.metrics.MetricsSystem
4242
import org.apache.spark.scheduler.{EventLoggingListener, ReplayListenerBus}
4343
import org.apache.spark.ui.SparkUI
44-
import org.apache.spark.util.{AkkaUtils, Utils}
44+
import org.apache.spark.util.{AkkaUtils, SignalLogger, Utils}
4545

4646
private[spark] class Master(
4747
host: String,
@@ -755,12 +755,13 @@ private[spark] class Master(
755755
}
756756
}
757757

758-
private[spark] object Master {
758+
private[spark] object Master extends Logging {
759759
val systemName = "sparkMaster"
760760
private val actorName = "Master"
761761
val sparkUrlRegex = "spark://([^:]+):([0-9]+)".r
762762

763763
def main(argStrings: Array[String]) {
764+
SignalLogger.register(log)
764765
val conf = new SparkConf
765766
val args = new MasterArguments(argStrings, conf)
766767
val (actorSystem, _, _) = startSystemAndActor(args.host, args.port, args.webUiPort, conf)

core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,7 @@ import org.apache.spark.deploy.DeployMessages._
3434
import org.apache.spark.deploy.master.{DriverState, Master}
3535
import org.apache.spark.deploy.worker.ui.WorkerWebUI
3636
import org.apache.spark.metrics.MetricsSystem
37-
import org.apache.spark.util.{AkkaUtils, Utils}
37+
import org.apache.spark.util.{AkkaUtils, SignalLogger, Utils}
3838

3939
/**
4040
* @param masterUrls Each url should look like spark://host:port.
@@ -365,8 +365,9 @@ private[spark] class Worker(
365365
}
366366
}
367367

368-
private[spark] object Worker {
368+
private[spark] object Worker extends Logging {
369369
def main(argStrings: Array[String]) {
370+
SignalLogger.register(log)
370371
val args = new WorkerArguments(argStrings)
371372
val (actorSystem, _) = startSystemAndActor(args.host, args.port, args.webUiPort, args.cores,
372373
args.memory, args.masters, args.workDir)

core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,7 @@ import org.apache.spark.deploy.SparkHadoopUtil
3131
import org.apache.spark.deploy.worker.WorkerWatcher
3232
import org.apache.spark.scheduler.TaskDescription
3333
import org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages._
34-
import org.apache.spark.util.{AkkaUtils, Utils}
34+
import org.apache.spark.util.{AkkaUtils, SignalLogger, Utils}
3535

3636
private[spark] class CoarseGrainedExecutorBackend(
3737
driverUrl: String,
@@ -97,10 +97,12 @@ private[spark] class CoarseGrainedExecutorBackend(
9797
}
9898
}
9999

100-
private[spark] object CoarseGrainedExecutorBackend {
100+
private[spark] object CoarseGrainedExecutorBackend extends Logging {
101101
def run(driverUrl: String, executorId: String, hostname: String, cores: Int,
102102
workerUrl: Option[String]) {
103103

104+
SignalLogger.register(log)
105+
104106
SparkHadoopUtil.get.runAsSparkUser { () =>
105107
// Debug code
106108
Utils.checkHost(hostname)

core/src/main/scala/org/apache/spark/executor/MesosExecutorBackend.scala

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -25,8 +25,8 @@ import org.apache.mesos.Protos.{TaskStatus => MesosTaskStatus, _}
2525

2626
import org.apache.spark.{Logging, TaskState}
2727
import org.apache.spark.TaskState.TaskState
28-
import org.apache.spark.util.Utils
2928
import org.apache.spark.deploy.SparkHadoopUtil
29+
import org.apache.spark.util.{SignalLogger, Utils}
3030

3131
private[spark] class MesosExecutorBackend
3232
extends MesosExecutor
@@ -93,8 +93,9 @@ private[spark] class MesosExecutorBackend
9393
/**
9494
* Entry point for Mesos executor.
9595
*/
96-
private[spark] object MesosExecutorBackend {
96+
private[spark] object MesosExecutorBackend extends Logging {
9797
def main(args: Array[String]) {
98+
SignalLogger.register(log)
9899
SparkHadoopUtil.get.runAsSparkUser { () =>
99100
MesosNativeLibrary.load()
100101
// Create a new Executor and start it running

0 commit comments

Comments (0)