2 files changed (+10, -2):
  main/scala/org/apache/spark/serializer
  test/scala/org/apache/spark/storage

@@ -25,7 +25,10 @@ import org.apache.spark.util.ByteBufferInputStream
 private[spark] class JavaSerializationStream(out: OutputStream) extends SerializationStream {
   val objOut = new ObjectOutputStream(out)
   var counter = 0;
-  // Calling reset to avoid memory leak: http://stackoverflow.com/questions/1281549/memory-leak-traps-in-the-java-standard-api
+  /* Calling reset to avoid memory leak: http://stackoverflow.com/questions/1281549/memory-leak-traps-in-the-java-standard-api
+   * But only call it every 1000th write, to avoid bloating the serialization stream
+   * (each time the stream resets, object class descriptions have to be re-written).
+   */
   def writeObject[T](t: T): SerializationStream = {
     objOut.writeObject(t);
     if (counter >= 1000) {
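The hunk above describes the counter-based reset in prose; the sketch below shows the same idea as a minimal, self-contained stream wrapper. It is illustrative only, not the actual Spark change: the class name PeriodicallyResettingStream and the resetInterval parameter are made up for this example.

// Sketch (not the actual Spark code): reset() keeps the ObjectOutputStream
// from retaining a reference to every object ever written, while resetting
// only every `resetInterval` writes avoids re-sending class descriptors
// with each object.
import java.io.{ObjectOutputStream, OutputStream}

class PeriodicallyResettingStream(out: OutputStream, resetInterval: Int = 1000) {
  private val objOut = new ObjectOutputStream(out)
  private var counter = 0

  def writeObject[T](t: T): this.type = {
    objOut.writeObject(t)
    counter += 1
    if (counter >= resetInterval) {
      objOut.reset() // drop the stream's handle table so written objects can be GC'd
      counter = 0
    }
    this
  }

  def flush(): Unit = objOut.flush()
  def close(): Unit = objOut.close()
}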
@@ -35,7 +35,12 @@ object Expander {
 }

 class LargeIteratorSuite extends FunSuite with LocalSparkContext {
-
+  /* Tests the ability of Spark to deal with user-provided iterators that
+   * generate more data than available memory. For any memory-based persistence,
+   * Spark will unroll the iterator into an ArrayBuffer for caching; however, when
+   * the user defines DISK_ONLY persistence, the iterator will be fed directly to
+   * the serializer and written to disk.
+   */
   val clusterUrl = "local-cluster[1,1,512]"
   test("Flatmap iterator") {
     sc = new SparkContext(clusterUrl, "mem_test");
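For context, a hypothetical driver program exercising the behaviour this suite targets might look like the sketch below. The object name and expansion factor are invented for illustration; the point is that with DISK_ONLY the expanded iterator is streamed to the serializer rather than unrolled into an ArrayBuffer.

// Illustrative only: a flatMap whose output is far larger than its input,
// persisted with DISK_ONLY so the iterator is written straight to disk.
import org.apache.spark.SparkContext
import org.apache.spark.storage.StorageLevel

object DiskOnlyFlatMapExample {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext("local-cluster[1,1,512]", "disk_only_example")
    val expanded = sc.parallelize(1 to 10)
      .flatMap(i => Iterator.fill(1000000)(i)) // each record expands to a million
      .persist(StorageLevel.DISK_ONLY)
    println(expanded.count())
    sc.stop()
  }
}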