from pyspark import SparkConf, SparkContext
from pyspark.streaming import StreamingContext


# Spark Streaming demo: consume a text stream from a local socket,
# apply a sliding window, split each line into words, and print batches.


def main():
    """Run the windowed socket-text streaming job until terminated.

    Side effects: connects to the Spark master and to 127.0.0.1:6000,
    writes checkpoints to the local filesystem, prints each batch to
    stdout, and blocks in ``awaitTermination`` until stopped.
    """
    # Configure the Spark cluster: standalone master, modest executors.
    conf = SparkConf()
    conf.setMaster("spark://10.2.3.41:7077")
    conf.setAppName("Streaming Analysis")
    conf.set("spark.executor.memory", "512m")
    conf.set("spark.executor.cores", "2")

    sc = SparkContext.getOrCreate(conf)
    sc.setLogLevel("ERROR")  # silence INFO/WARN noise in the driver log

    # 2-second micro-batches; checkpointing is required for windowed state.
    streaming = StreamingContext(sc, 2)
    streaming.checkpoint("file:///media/psf/Home/Workspace/Rimi/P1901/lessons/spark/checkpoints")

    dstream = streaming.socketTextStream("127.0.0.1", 6000)
    # 6-second window, recomputed every 2 seconds (slide must be a
    # multiple of the batch interval).
    dstream = dstream.window(6, 2)
    # Whitespace-split each incoming line into individual words.
    dstream = dstream.flatMap(lambda line: line.split())
    dstream.pprint()

    streaming.start()
    streaming.awaitTermination()


if __name__ == "__main__":
    main()