 import sys
 
 import numpy as np
-from numpy.random import rand
 from numpy import matrix
+from numpy.random import rand
 from pyspark.sql import SparkSession
 
 LAMBDA = 0.01   # regularization
@@ -62,10 +62,10 @@ def update(i, mat, ratings):
           example. Please use pyspark.ml.recommendation.ALS for more
           conventional use.""", file=sys.stderr)
 
-    spark = SparkSession \
-        .builder \
-        .appName("PythonALS") \
-        .getOrCreate()
+    spark = (SparkSession
+             .builder
+             .appName("PythonALS")
+             .getOrCreate())
 
     sc = spark.sparkContext
@@ -87,17 +87,19 @@ def update(i, mat, ratings):
     usb = sc.broadcast(us)
 
     for i in range(ITERATIONS):
-        ms = sc.parallelize(range(M), partitions) \
-               .map(lambda x: update(x, usb.value, Rb.value)) \
-               .collect()
+        ms = (sc
+              .parallelize(range(M), partitions)
+              .map(lambda x: update(x, usb.value, Rb.value))
+              .collect())
         # collect() returns a list, so array ends up being
         # a 3-d array, we take the first 2 dims for the matrix
         ms = matrix(np.array(ms)[:, :, 0])
         msb = sc.broadcast(ms)
 
-        us = sc.parallelize(range(U), partitions) \
-               .map(lambda x: update(x, msb.value, Rb.value.T)) \
-               .collect()
+        us = (sc
+              .parallelize(range(U), partitions)
+              .map(lambda x: update(x, msb.value, Rb.value.T))
+              .collect())
         us = matrix(np.array(us)[:, :, 0])
         usb = sc.broadcast(us)
0 commit comments