Skip to content

Commit 11999c7

Browse files
committed
Merge branch 'master' into vector
2 parents f7da54b + 8b3045c commit 11999c7

File tree

44 files changed

+2510
-70
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

44 files changed

+2510
-70
lines changed

core/pom.xml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -150,7 +150,7 @@
150150
<artifactId>json4s-jackson_${scala.binary.version}</artifactId>
151151
<version>3.2.6</version>
152152
<!-- see also exclusion for lift-json; this is necessary since it depends on
153-
scala-library and scalap 2.10.0, but we use 2.10.3, and only override
153+
scala-library and scalap 2.10.0, but we use 2.10.4, and only override
154154
scala-library -->
155155
<exclusions>
156156
<exclusion>

core/src/main/scala/org/apache/spark/ui/JettyUtils.scala

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,13 +18,14 @@
1818
package org.apache.spark.ui
1919

2020
import java.net.{InetSocketAddress, URL}
21+
import javax.servlet.DispatcherType
2122
import javax.servlet.http.{HttpServlet, HttpServletRequest, HttpServletResponse}
2223

2324
import scala.annotation.tailrec
2425
import scala.util.{Failure, Success, Try}
2526
import scala.xml.Node
2627

27-
import org.eclipse.jetty.server.{DispatcherType, Server}
28+
import org.eclipse.jetty.server.Server
2829
import org.eclipse.jetty.server.handler._
2930
import org.eclipse.jetty.servlet._
3031
import org.eclipse.jetty.util.thread.QueuedThreadPool

core/src/main/scala/org/apache/spark/ui/storage/BlockManagerUI.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -84,7 +84,7 @@ private[ui] class BlockManagerListener(storageStatusListener: StorageStatusListe
8484

8585
override def onStageSubmitted(stageSubmitted: SparkListenerStageSubmitted) = synchronized {
8686
val rddInfo = stageSubmitted.stageInfo.rddInfo
87-
_rddInfoMap(rddInfo.id) = rddInfo
87+
_rddInfoMap.getOrElseUpdate(rddInfo.id, rddInfo)
8888
}
8989

9090
override def onStageCompleted(stageCompleted: SparkListenerStageCompleted) = synchronized {

dev/audit-release/README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ run them locally by setting appropriate environment variables.
44

55
```
66
$ cd sbt_app_core
7-
$ SCALA_VERSION=2.10.3 \
7+
$ SCALA_VERSION=2.10.4 \
88
SPARK_VERSION=1.0.0-SNAPSHOT \
99
SPARK_RELEASE_REPOSITORY=file:///home/patrick/.ivy2/local \
1010
sbt run

dev/audit-release/audit_release.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@
3535
RELEASE_KEY = "9E4FE3AF"
3636
RELEASE_REPOSITORY = "https://repository.apache.org/content/repositories/orgapachespark-1006/"
3737
RELEASE_VERSION = "1.0.0"
38-
SCALA_VERSION = "2.10.3"
38+
SCALA_VERSION = "2.10.4"
3939
SCALA_BINARY_VERSION = "2.10"
4040
##
4141

docker/spark-test/base/Dockerfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ RUN apt-get update
2525
# install a few other useful packages plus Open Jdk 7
2626
RUN apt-get install -y less openjdk-7-jre-headless net-tools vim-tiny sudo openssh-server
2727

28-
ENV SCALA_VERSION 2.10.3
28+
ENV SCALA_VERSION 2.10.4
2929
ENV CDH_VERSION cdh4
3030
ENV SCALA_HOME /opt/scala-$SCALA_VERSION
3131
ENV SPARK_HOME /opt/spark

docs/_config.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@ markdown: kramdown
66
SPARK_VERSION: 1.0.0-SNAPSHOT
77
SPARK_VERSION_SHORT: 1.0.0
88
SCALA_BINARY_VERSION: "2.10"
9-
SCALA_VERSION: "2.10.3"
9+
SCALA_VERSION: "2.10.4"
1010
MESOS_VERSION: 0.13.0
1111
SPARK_ISSUE_TRACKER_URL: https://spark-project.atlassian.net
1212
SPARK_GITHUB_URL: https://github.com/apache/spark

docs/running-on-yarn.md

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -61,7 +61,7 @@ The command to launch the Spark application on the cluster is as follows:
6161
SPARK_JAR=<SPARK_ASSEMBLY_JAR_FILE> ./bin/spark-class org.apache.spark.deploy.yarn.Client \
6262
--jar <YOUR_APP_JAR_FILE> \
6363
--class <APP_MAIN_CLASS> \
64-
--args <APP_MAIN_ARGUMENTS> \
64+
--arg <APP_MAIN_ARGUMENT> \
6565
--num-executors <NUMBER_OF_EXECUTOR_PROCESSES> \
6666
--driver-memory <MEMORY_FOR_ApplicationMaster> \
6767
--executor-memory <MEMORY_PER_EXECUTOR> \
@@ -72,7 +72,7 @@ The command to launch the Spark application on the cluster is as follows:
7272
--files <files_for_distributed_cache> \
7373
--archives <archives_for_distributed_cache>
7474

75-
For example:
75+
To pass multiple arguments the "arg" option can be specified multiple times. For example:
7676

7777
# Build the Spark assembly JAR and the Spark examples JAR
7878
$ SPARK_HADOOP_VERSION=2.0.5-alpha SPARK_YARN=true sbt/sbt assembly
@@ -85,7 +85,8 @@ For example:
8585
./bin/spark-class org.apache.spark.deploy.yarn.Client \
8686
--jar examples/target/scala-{{site.SCALA_BINARY_VERSION}}/spark-examples-assembly-{{site.SPARK_VERSION}}.jar \
8787
--class org.apache.spark.examples.SparkPi \
88-
--args yarn-cluster \
88+
--arg yarn-cluster \
89+
--arg 5 \
8990
--num-executors 3 \
9091
--driver-memory 4g \
9192
--executor-memory 2g \

0 commit comments

Comments (0)