Skip to content

Commit cad4c70

Browse files
committed
Address the comments
Change-Id: Ic9ca657f54204d85fc2a441f15f39abf8cd0e277
1 parent 16ce99f commit cad4c70

File tree

2 files changed

+19
-20
lines changed

2 files changed

+19
-20
lines changed

core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala

Lines changed: 10 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -210,11 +210,15 @@ object SparkSubmit extends CommandLineUtils {
210210

211211
/**
212212
* Prepare the environment for submitting an application.
213-
* This returns a 4-tuple:
214-
* (1) the arguments for the child process,
215-
* (2) a list of classpath entries for the child,
216-
* (3) a map of system properties, and
217-
* (4) the main class for the child
213+
*
214+
* @param args the parsed SparkSubmitArguments used for environment preparation.
215+
* @param conf the Hadoop Configuration; this argument will only be set in unit tests.
216+
* @return a 4-tuple:
217+
* (1) the arguments for the child process,
218+
* (2) a list of classpath entries for the child,
219+
* (3) a map of system properties, and
220+
* (4) the main class for the child
221+
*
218222
* Exposed for testing.
219223
*/
220224
private[deploy] def prepareSubmitEnvironment(
@@ -508,7 +512,7 @@ object SparkSubmit extends CommandLineUtils {
508512
}
509513
// Add the main application jar and any added jars to classpath in case YARN client
510514
// requires these jars.
511-
// This assumes both primaryResource and user jars are local jars, eitherwise it will not be
515+
// This assumes both primaryResource and user jars are local jars, otherwise it will not be
512516
// added to the classpath of YARN client.
513517
if (isYarnCluster) {
514518
if (isUserJar(args.primaryResource)) {

core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala

Lines changed: 9 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -802,10 +802,7 @@ class SparkSubmitSuite
802802
test("downloadFile - file doesn't exist") {
803803
val hadoopConf = new Configuration()
804804
val tmpDir = Utils.createTempDir()
805-
// Set s3a implementation to local file system for testing.
806-
hadoopConf.set("fs.s3a.impl", "org.apache.spark.deploy.TestFileSystem")
807-
// Disable file system impl cache to make sure the test file system is picked up.
808-
hadoopConf.set("fs.s3a.impl.disable.cache", "true")
805+
updateConfWithFakeS3Fs(hadoopConf)
809806
intercept[FileNotFoundException] {
810807
SparkSubmit.downloadFile("s3a:/no/such/file", tmpDir, mutable.Map.empty, hadoopConf)
811808
}
@@ -826,10 +823,7 @@ class SparkSubmitSuite
826823
FileUtils.write(jarFile, content)
827824
val hadoopConf = new Configuration()
828825
val tmpDir = Files.createTempDirectory("tmp").toFile
829-
// Set s3a implementation to local file system for testing.
830-
hadoopConf.set("fs.s3a.impl", "org.apache.spark.deploy.TestFileSystem")
831-
// Disable file system impl cache to make sure the test file system is picked up.
832-
hadoopConf.set("fs.s3a.impl.disable.cache", "true")
826+
updateConfWithFakeS3Fs(hadoopConf)
833827
val sourcePath = s"s3a://${jarFile.getAbsolutePath}"
834828
val outputPath =
835829
SparkSubmit.downloadFile(sourcePath, tmpDir, mutable.Map.empty, hadoopConf)
@@ -844,10 +838,7 @@ class SparkSubmitSuite
844838
FileUtils.write(jarFile, content)
845839
val hadoopConf = new Configuration()
846840
val tmpDir = Files.createTempDirectory("tmp").toFile
847-
// Set s3a implementation to local file system for testing.
848-
hadoopConf.set("fs.s3a.impl", "org.apache.spark.deploy.TestFileSystem")
849-
// Disable file system impl cache to make sure the test file system is picked up.
850-
hadoopConf.set("fs.s3a.impl.disable.cache", "true")
841+
updateConfWithFakeS3Fs(hadoopConf)
851842
val sourcePaths = Seq("/local/file", s"s3a://${jarFile.getAbsolutePath}")
852843
val outputPaths = SparkSubmit.downloadFileList(
853844
sourcePaths.mkString(","), tmpDir, mutable.Map.empty, hadoopConf).split(",")
@@ -861,8 +852,7 @@ class SparkSubmitSuite
861852

862853
test("Avoid re-upload remote resources in yarn client mode") {
863854
val hadoopConf = new Configuration()
864-
hadoopConf.set("fs.s3a.impl", "org.apache.spark.deploy.TestFileSystem")
865-
hadoopConf.set("fs.s3a.impl.disable.cache", "true")
855+
updateConfWithFakeS3Fs(hadoopConf)
866856

867857
val tmpDir = Utils.createTempDir()
868858
val file = File.createTempFile("tmpFile", "", tmpDir)
@@ -936,6 +926,11 @@ class SparkSubmitSuite
936926
Utils.deleteRecursively(tmpDir)
937927
}
938928
}
929+
930+
private def updateConfWithFakeS3Fs(conf: Configuration): Unit = {
931+
conf.set("fs.s3a.impl", classOf[TestFileSystem].getCanonicalName)
932+
conf.set("fs.s3a.impl.disable.cache", "true")
933+
}
939934
}
940935

941936
object JarCreationTest extends Logging {

0 commit comments

Comments
 (0)