Skip to content

Commit 616d111

Browse files
committed
Merge remote-tracking branch 'upstream/master'
2 parents 35c1884 + 7e758d7 commit 616d111

File tree

69 files changed

+2945
-588
lines changed

Some content is hidden

Large commits have some of their content hidden by default. Use the search box below to find content that may be hidden.

69 files changed

+2945
-588
lines changed

.gitignore

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@
55
*.ipr
66
*.iml
77
*.iws
8+
*.pyc
89
.idea/
910
.idea_modules/
1011
sbt/*.jar
@@ -49,6 +50,8 @@ dependency-reduced-pom.xml
4950
checkpoint
5051
derby.log
5152
dist/
53+
dev/create-release/*txt
54+
dev/create-release/*new
5255
spark-*-bin-*.tgz
5356
unit-tests.log
5457
/lib/

core/src/main/scala/org/apache/spark/deploy/client/AppClient.scala

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -134,6 +134,7 @@ private[spark] class AppClient(
134134
val fullId = appId + "/" + id
135135
logInfo("Executor added: %s on %s (%s) with %d cores".format(fullId, workerId, hostPort,
136136
cores))
137+
master ! ExecutorStateChanged(appId, id, ExecutorState.RUNNING, None, None)
137138
listener.executorAdded(fullId, workerId, hostPort, cores, memory)
138139

139140
case ExecutorUpdated(id, state, message, exitStatus) =>

core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -144,8 +144,6 @@ private[spark] class ExecutorRunner(
144144
Files.write(header, stderr, UTF_8)
145145
stderrAppender = FileAppender(process.getErrorStream, stderr, conf)
146146

147-
state = ExecutorState.RUNNING
148-
worker ! ExecutorStateChanged(appId, execId, state, None, None)
149147
// Wait for it to exit; executor may exit with code 0 (when driver instructs it to shutdown)
150148
// or with nonzero exit code
151149
val exitCode = process.waitFor()

core/src/main/scala/org/apache/spark/rdd/BinaryFileRDD.scala

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -24,12 +24,12 @@ import org.apache.spark.input.StreamFileInputFormat
2424
import org.apache.spark.{ Partition, SparkContext }
2525

2626
private[spark] class BinaryFileRDD[T](
27-
sc: SparkContext,
28-
inputFormatClass: Class[_ <: StreamFileInputFormat[T]],
29-
keyClass: Class[String],
30-
valueClass: Class[T],
31-
@transient conf: Configuration,
32-
minPartitions: Int)
27+
sc: SparkContext,
28+
inputFormatClass: Class[_ <: StreamFileInputFormat[T]],
29+
keyClass: Class[String],
30+
valueClass: Class[T],
31+
@transient conf: Configuration,
32+
minPartitions: Int)
3333
extends NewHadoopRDD[String, T](sc, inputFormatClass, keyClass, valueClass, conf) {
3434

3535
override def getPartitions: Array[Partition] = {

core/src/main/scala/org/apache/spark/rdd/FilteredRDD.scala

Lines changed: 0 additions & 35 deletions
This file was deleted.

core/src/main/scala/org/apache/spark/rdd/FlatMappedRDD.scala

Lines changed: 0 additions & 34 deletions
This file was deleted.

core/src/main/scala/org/apache/spark/rdd/FlatMappedValuesRDD.scala

Lines changed: 0 additions & 35 deletions
This file was deleted.

core/src/main/scala/org/apache/spark/rdd/GlommedRDD.scala

Lines changed: 0 additions & 31 deletions
This file was deleted.

core/src/main/scala/org/apache/spark/rdd/MappedRDD.scala

Lines changed: 0 additions & 32 deletions
This file was deleted.

core/src/main/scala/org/apache/spark/rdd/MappedValuesRDD.scala

Lines changed: 0 additions & 33 deletions
This file was deleted.

0 commit comments

Comments
 (0)