Commit 44ec35a

Adding some comments.
1 parent 5eb2b7e commit 44ec35a

2 files changed, +10 -2 lines changed

core/src/main/scala/org/apache/spark/serializer/JavaSerializer.scala

Lines changed: 4 additions & 1 deletion

@@ -25,7 +25,10 @@ import org.apache.spark.util.ByteBufferInputStream
 private[spark] class JavaSerializationStream(out: OutputStream) extends SerializationStream {
   val objOut = new ObjectOutputStream(out)
   var counter = 0;
-  //Calling reset to avoid memory leak: http://stackoverflow.com/questions/1281549/memory-leak-traps-in-the-java-standard-api
+  /* Calling reset to avoid memory leak: http://stackoverflow.com/questions/1281549/memory-leak-traps-in-the-java-standard-api
+   * But only call it every 1000th time to avoid bloated serialization streams (when
+   * the stream 'resets' object class descriptions have to be re-written)
+   */
   def writeObject[T](t: T): SerializationStream = {
     objOut.writeObject(t);
     if (counter >= 1000) {
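
The comment added above documents the trade-off behind ObjectOutputStream.reset(): without periodic resets the stream keeps a handle table referencing every object ever written (leaking memory), while resetting on every write forces class descriptors to be re-emitted each time. The following standalone Scala sketch illustrates that pattern outside of Spark; the ResetEveryN object name and the sample data are illustrative, and the 1000-write threshold mirrors the value used in the patch.

import java.io.{ByteArrayOutputStream, ObjectOutputStream}

object ResetEveryN {
  def main(args: Array[String]): Unit = {
    val bytes = new ByteArrayOutputStream()
    val objOut = new ObjectOutputStream(bytes)
    var counter = 0

    // Write an object, then reset the stream every 1000th write so the
    // handle table (which pins every written object) is cleared, while
    // avoiding the per-write cost of re-serializing class descriptors.
    def writeObject[T](t: T): Unit = {
      objOut.writeObject(t)
      if (counter >= 1000) {
        objOut.reset()
        counter = 0
      } else {
        counter += 1
      }
    }

    (1 to 5000).foreach(i => writeObject("record-" + i))
    objOut.close()
    println("wrote " + bytes.size() + " bytes")
  }
}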

core/src/test/scala/org/apache/spark/storage/LargeIteratorSuite.scala

Lines changed: 6 additions & 1 deletion

@@ -35,7 +35,12 @@ object Expander {
 }

 class LargeIteratorSuite extends FunSuite with LocalSparkContext {
-
+  /* Tests the ability of Spark to deal with user-provided iterators that
+   * generate more data than available memory. With any memory-based persistence
+   * Spark will unroll the iterator into an ArrayBuffer for caching; however, in
+   * the case that the user defines DISK_ONLY persistence, the iterator will be
+   * fed directly to the serializer and written to disk.
+   */
   val clusterUrl = "local-cluster[1,1,512]"
   test("Flatmap iterator") {
     sc = new SparkContext(clusterUrl, "mem_test");
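
The new comment explains what the test exercises: an iterator produced by flatMap can yield more data than fits in memory, and with DISK_ONLY persistence Spark streams it to the serializer and disk instead of unrolling it into an in-memory ArrayBuffer. Below is a minimal sketch of that usage, not the test itself; the object name, app name, element counts, and record sizes are made up for illustration, and the local-cluster master string mirrors the one used by the suite (it requires a local Spark build to launch executors).

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.storage.StorageLevel

object DiskOnlyIteratorSketch {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setMaster("local-cluster[1,1,512]")
      .setAppName("disk_only_iterator_sketch")
    val sc = new SparkContext(conf)

    // Each input element expands to many records through an iterator, so the
    // expanded data is never materialized upstream. With DISK_ONLY, caching
    // feeds the iterator straight to the serializer and spills to disk
    // rather than unrolling it into an in-memory ArrayBuffer.
    val expanded = sc.parallelize(1 to 10, 2)
      .flatMap(i => Iterator.fill(200000)(new Array[Byte](1024)))
      .persist(StorageLevel.DISK_ONLY)

    println("records: " + expanded.count())
    sc.stop()
  }
}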
