@@ -802,10 +802,7 @@ class SparkSubmitSuite
   test("downloadFile - file doesn't exist") {
     val hadoopConf = new Configuration()
     val tmpDir = Utils.createTempDir()
-    // Set s3a implementation to local file system for testing.
-    hadoopConf.set("fs.s3a.impl", "org.apache.spark.deploy.TestFileSystem")
-    // Disable file system impl cache to make sure the test file system is picked up.
-    hadoopConf.set("fs.s3a.impl.disable.cache", "true")
+    updateConfWithFakeS3Fs(hadoopConf)
     intercept[FileNotFoundException] {
       SparkSubmit.downloadFile("s3a:/no/such/file", tmpDir, mutable.Map.empty, hadoopConf)
     }
@@ -826,10 +823,7 @@ class SparkSubmitSuite
     FileUtils.write(jarFile, content)
     val hadoopConf = new Configuration()
     val tmpDir = Files.createTempDirectory("tmp").toFile
-    // Set s3a implementation to local file system for testing.
-    hadoopConf.set("fs.s3a.impl", "org.apache.spark.deploy.TestFileSystem")
-    // Disable file system impl cache to make sure the test file system is picked up.
-    hadoopConf.set("fs.s3a.impl.disable.cache", "true")
+    updateConfWithFakeS3Fs(hadoopConf)
     val sourcePath = s"s3a://${jarFile.getAbsolutePath}"
     val outputPath =
       SparkSubmit.downloadFile(sourcePath, tmpDir, mutable.Map.empty, hadoopConf)
@@ -844,10 +838,7 @@ class SparkSubmitSuite
     FileUtils.write(jarFile, content)
     val hadoopConf = new Configuration()
     val tmpDir = Files.createTempDirectory("tmp").toFile
-    // Set s3a implementation to local file system for testing.
-    hadoopConf.set("fs.s3a.impl", "org.apache.spark.deploy.TestFileSystem")
-    // Disable file system impl cache to make sure the test file system is picked up.
-    hadoopConf.set("fs.s3a.impl.disable.cache", "true")
+    updateConfWithFakeS3Fs(hadoopConf)
     val sourcePaths = Seq("/local/file", s"s3a://${jarFile.getAbsolutePath}")
     val outputPaths = SparkSubmit.downloadFileList(
       sourcePaths.mkString(","), tmpDir, mutable.Map.empty, hadoopConf).split(",")
@@ -861,8 +852,7 @@ class SparkSubmitSuite

   test("Avoid re-upload remote resources in yarn client mode") {
     val hadoopConf = new Configuration()
-    hadoopConf.set("fs.s3a.impl", "org.apache.spark.deploy.TestFileSystem")
-    hadoopConf.set("fs.s3a.impl.disable.cache", "true")
+    updateConfWithFakeS3Fs(hadoopConf)

     val tmpDir = Utils.createTempDir()
     val file = File.createTempFile("tmpFile", "", tmpDir)
@@ -936,6 +926,11 @@ class SparkSubmitSuite
       Utils.deleteRecursively(tmpDir)
     }
   }
+
+  private def updateConfWithFakeS3Fs(conf: Configuration): Unit = {
+    conf.set("fs.s3a.impl", classOf[TestFileSystem].getCanonicalName)
+    conf.set("fs.s3a.impl.disable.cache", "true")
+  }
 }

 object JarCreationTest extends Logging {
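
Both the inline settings that were removed and the new updateConfWithFakeS3Fs helper point fs.s3a.impl at org.apache.spark.deploy.TestFileSystem, whose definition lies outside these hunks. As a rough sketch only (an assumption, not the suite's actual class), a LocalFileSystem subclass that strips the s3a scheme before delegating would be enough for the download tests above:

import org.apache.hadoop.fs.{FileStatus, LocalFileSystem, Path}

// Hypothetical stand-in for org.apache.spark.deploy.TestFileSystem: it serves
// s3a:// URIs straight from the local disk, so the tests need no real S3 access.
class TestFileSystem extends LocalFileSystem {

  // Drop the s3a scheme and authority so the path resolves against the local FS.
  private def asLocal(path: Path): Path = new Path(path.toUri.getPath)

  override def copyToLocalFile(src: Path, dst: Path): Unit = {
    super.copyToLocalFile(asLocal(src), dst)
  }

  override def getFileStatus(path: Path): FileStatus = {
    super.getFileStatus(asLocal(path))
  }
}

Registering such a class through updateConfWithFakeS3Fs, together with fs.s3a.impl.disable.cache=true so a previously cached implementation is not reused, is what lets SparkSubmit.downloadFile treat the s3a:// paths in these tests as local files.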