@@ -306,20 +306,19 @@ def sequenceFile(self, name, key_class="org.apache.hadoop.io.Text", value_class=
         2. Serialization is attempted via Pyrolite pickling
         3. If this fails, the fallback is to call 'toString' on each key and value
         4. C{PickleSerializer} is used to deserialize pickled objects on the Python side
-
-        >>> sc.sequenceFile("test_support/data/sfint/").collect()
+        >>> sc.sequenceFile(tempdir + "/sftestdata/sfint/").collect()
         [(1, u'aa'), (2, u'bb'), (2, u'aa'), (3, u'cc'), (2, u'bb'), (1, u'aa')]
-        >>> sc.sequenceFile("test_support/data/sfdouble/").collect()
+        >>> sc.sequenceFile(tempdir + "/sftestdata/sfdouble/").collect()
         [(1.0, u'aa'), (2.0, u'bb'), (2.0, u'aa'), (3.0, u'cc'), (2.0, u'bb'), (1.0, u'aa')]
-        >>> sc.sequenceFile("test_support/data/sftext/").collect()
+        >>> sc.sequenceFile(tempdir + "/sftestdata/sftext/").collect()
         [(u'1', u'aa'), (u'2', u'bb'), (u'2', u'aa'), (u'3', u'cc'), (u'2', u'bb'), (u'1', u'aa')]
-        >>> sc.sequenceFile("test_support/data/sfbool/").collect()
+        >>> sc.sequenceFile(tempdir + "/sftestdata/sfbool/").collect()
         [(1, True), (2, True), (2, False), (3, True), (2, False), (1, False)]
-        >>> sc.sequenceFile("test_support/data/sfnull/").collect()
+        >>> sc.sequenceFile(tempdir + "/sftestdata/sfnull/").collect()
         [(1, None), (2, None), (2, None), (3, None), (2, None), (1, None)]
-        >>> sc.sequenceFile("test_support/data/sfmap/").collect()
+        >>> sc.sequenceFile(tempdir + "/sftestdata/sfmap/").collect()
         [(1, {2.0: u'aa'}), (2, {3.0: u'bb'}), (2, {1.0: u'cc'}), (3, {2.0: u'dd'}), (2, {1.0: u'aa'}), (1, {3.0: u'bb'})]
-        >>> sc.sequenceFile("test_support/data/sfclass").first()
+        >>> sc.sequenceFile(tempdir + "/sftestdata/sfclass").first()
         (u'1', {u'int': 123, u'double': 54.0, u'__class__': u'org.apache.spark.api.python.TestWritable', u'str': u'test1'})
         """
         minSplits = minSplits or min(self.defaultParallelism, 2)
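The serialization steps above determine what comes back on the Python side: Writable keys and values that Pyrolite can pickle arrive as native Python objects, and anything else falls back to its `toString` form. A minimal sketch of reading one of the generated files with the key and value classes spelled out explicitly (the `key_class`/`value_class` parameter names come from the signature in this hunk's header; the IntWritable/Text layout of "sfint" is assumed from the doctest output above):

    # Sketch under assumptions: `sc` and `tempdir` are the doctest globals set
    # up in _test(); "sfint" holds IntWritable keys and Text values.
    rdd = sc.sequenceFile(tempdir + "/sftestdata/sfint/",
                          key_class="org.apache.hadoop.io.IntWritable",
                          value_class="org.apache.hadoop.io.Text")
    print(rdd.collect())  # e.g. [(1, u'aa'), (2, u'bb'), (2, u'aa'), ...]

Passing the classes explicitly is optional here; the defaults in the signature are overridden only when the file's Writable types differ from Text.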
@@ -555,6 +554,7 @@ def _test():
     globs = globals().copy()
     globs['sc'] = SparkContext('local[4]', 'PythonTest', batchSize=2)
     globs['tempdir'] = tempfile.mkdtemp()
+    globs['sc']._jvm.WriteInputFormatTestDataGenerator.generateData(globs['tempdir'], globs['sc']._jsc)
     atexit.register(lambda: shutil.rmtree(globs['tempdir']))
     (failure_count, test_count) = doctest.testmod(globs=globs)
     globs['sc'].stop()
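For context, `_jvm` is the Py4J gateway into the driver JVM, so the added line invokes the Scala helper `WriteInputFormatTestDataGenerator.generateData` to write the SequenceFiles the doctests read into the temp directory, which the `atexit` hook then removes. Assuming the module keeps the usual PySpark pattern of invoking `_test()` from a `__main__` guard (not shown in this hunk), the doctests become self-contained:

    # Assumed entry point, not part of this hunk: running the file
    # directly executes the doctests against the generated test data.
    if __name__ == "__main__":
        _test()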