
Commit ac710c7

Address the comments
Change-Id: Ia1ae58b27edce1283b507026cdc4c0bd3b35817c
1 parent 3c9120e commit ac710c7

4 files changed (+8, -9 lines)

core/src/main/scala/org/apache/spark/SparkConf.scala

Lines changed: 2 additions & 2 deletions
@@ -580,8 +580,8 @@ private[spark] object SparkConf extends Logging {
       DeprecatedConfig("spark.rpc", "2.0", "Not used any more."),
       DeprecatedConfig("spark.scheduler.executorTaskBlacklistTime", "2.1.0",
         "Please use the new blacklisting options, spark.blacklist.*"),
-      DeprecatedConfig("spark.yarn.am.port", "2.2.1", "Not used any more"),
-      DeprecatedConfig("spark.executor.port", "2.2.1", "Not used any more")
+      DeprecatedConfig("spark.yarn.am.port", "2.0.0", "Not used any more"),
+      DeprecatedConfig("spark.executor.port", "2.0.0", "Not used any more")
     )
 
     Map(configs.map { cfg => (cfg.key -> cfg) } : _*)
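
For reference, a minimal, self-contained sketch of the pattern this hunk touches: a sequence of deprecation entries turned into a key-to-config lookup map through Map's varargs constructor. The DeprecatedConfig case class below is a standalone stand-in that only mirrors the fields used here, not Spark's actual definition.

// Standalone stand-in for the DeprecatedConfig entries edited above.
case class DeprecatedConfig(key: String, version: String, deprecationMessage: String)

object DeprecationMapSketch {
  def main(args: Array[String]): Unit = {
    val configs = Seq(
      DeprecatedConfig("spark.yarn.am.port", "2.0.0", "Not used any more"),
      DeprecatedConfig("spark.executor.port", "2.0.0", "Not used any more"))

    // Same idiom as the unchanged line in the hunk: expand the (key, cfg) pairs into Map's varargs.
    val byKey: Map[String, DeprecatedConfig] = Map(configs.map { cfg => (cfg.key -> cfg) } : _*)

    println(byKey("spark.executor.port").deprecationMessage)  // prints: Not used any more
  }
}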

core/src/main/scala/org/apache/spark/SparkEnv.scala

Lines changed: 4 additions & 5 deletions
@@ -177,7 +177,7 @@ object SparkEnv extends Logging {
       SparkContext.DRIVER_IDENTIFIER,
       bindAddress,
       advertiseAddress,
-      port,
+      Option(port),
       isLocal,
       numCores,
       ioEncryptionKey,
@@ -194,7 +194,6 @@ object SparkEnv extends Logging {
       conf: SparkConf,
       executorId: String,
       hostname: String,
-      port: Int,
       numCores: Int,
       ioEncryptionKey: Option[Array[Byte]],
       isLocal: Boolean): SparkEnv = {
@@ -203,7 +202,7 @@ object SparkEnv extends Logging {
       executorId,
       hostname,
       hostname,
-      port,
+      None,
       isLocal,
       numCores,
       ioEncryptionKey
@@ -220,7 +219,7 @@ object SparkEnv extends Logging {
       executorId: String,
       bindAddress: String,
       advertiseAddress: String,
-      port: Int,
+      port: Option[Int],
       isLocal: Boolean,
       numUsableCores: Int,
       ioEncryptionKey: Option[Array[Byte]],
@@ -243,7 +242,7 @@ object SparkEnv extends Logging {
     }
 
     val systemName = if (isDriver) driverSystemName else executorSystemName
-    val rpcEnv = RpcEnv.create(systemName, bindAddress, advertiseAddress, port, conf,
+    val rpcEnv = RpcEnv.create(systemName, bindAddress, advertiseAddress, port.getOrElse(-1), conf,
       securityManager, clientMode = !isDriver)
 
     // Figure out which port RpcEnv actually bound to in case the original port is 0 or occupied.
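
A minimal sketch of the Option-based port handling introduced above, assuming the convention visible in this diff: driver-side callers wrap a concrete port in Option(...), executor-side callers pass None, and the RPC layer still receives a plain Int via getOrElse. The names here are illustrative, not Spark's real API.

object PortOptionSketch {
  // Stand-in for the port.getOrElse(-1) call in the hunk above; -1 is simply the
  // sentinel this sketch uses for "no fixed port requested".
  def resolveBindPort(port: Option[Int]): Int = port.getOrElse(-1)

  def main(args: Array[String]): Unit = {
    println(resolveBindPort(Option(7077)))  // driver-style caller: 7077
    println(resolveBindPort(None))          // executor-style caller: -1
  }
}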

core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala

Lines changed: 1 addition & 1 deletion
@@ -220,7 +220,7 @@ private[spark] object CoarseGrainedExecutorBackend extends Logging {
       }
 
       val env = SparkEnv.createExecutorEnv(
-        driverConf, executorId, hostname, -1, cores, cfg.ioEncryptionKey, isLocal = false)
+        driverConf, executorId, hostname, cores, cfg.ioEncryptionKey, isLocal = false)
 
       env.rpcEnv.setupEndpoint("Executor", new CoarseGrainedExecutorBackend(
         env.rpcEnv, driverUrl, executorId, hostname, cores, userClassPath, env))

resource-managers/mesos/src/main/scala/org/apache/spark/executor/MesosExecutorBackend.scala

Lines changed: 1 addition & 1 deletion
@@ -75,7 +75,7 @@ private[spark] class MesosExecutorBackend
         Seq[(String, String)](("spark.app.id", frameworkInfo.getId.getValue))
     val conf = new SparkConf(loadDefaults = true).setAll(properties)
     val env = SparkEnv.createExecutorEnv(
-      conf, executorId, slaveInfo.getHostname, -1, cpusPerTask, None, isLocal = false)
+      conf, executorId, slaveInfo.getHostname, cpusPerTask, None, isLocal = false)
 
     executor = new Executor(
       executorId,
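
A toy before/after illustration of the caller-side effect in the two executor backends above: once the unused port parameter is dropped from the factory method, call sites no longer pass a -1 placeholder. The function below is a hypothetical stand-in with invented parameters, not Spark's createExecutorEnv signature.

object CallerUpdateSketch {
  // New shape: no port argument, matching the updated call sites in the diff.
  def createExecutorEnvSketch(executorId: String, hostname: String, cores: Int): String =
    s"$executorId@$hostname cores=$cores"

  def main(args: Array[String]): Unit = {
    // Before: createExecutorEnvSketch("exec-1", "host-a", -1, 4)  -- dummy port, now removed
    println(createExecutorEnvSketch("exec-1", "host-a", 4))
  }
}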
