from functools import reduce

from pyspark import SparkContext, SparkConf, AccumulatorParam

# Project-local dependency: custom Integer wrapper (also shipped to executors via addPyFile below).
from spark_example.deps.integer import Integer

conf = SparkConf().setAppName("KNN")

# getOrCreate reuses a running SparkContext if one exists — avoids
# "Cannot run multiple SparkContexts" errors in interactive sessions.
sc = SparkContext.getOrCreate(conf=conf)
# Ship the custom Integer module to the executors so the accumulator's
# merge logic can unpickle Integer instances remotely.
sc.addPyFile("/Users/sonto/Workspace/Rimi/P1902/spark_example/deps/integer.py")

rdd1 = sc.parallelize((1,))
rdd2 = sc.parallelize((4, 5, 6, 7, 8, 9, 10, 11, 12))

# Set literals instead of set([...]).  The previous `x = Integer(1)` was
# dead code (immediately overwritten), as was the discarded
# `x.difference(y)` result; both removed.
x = {1, 2, 3, 4, 5}
y = {1, 2, 3}

class IntAccum(AccumulatorParam):
    """AccumulatorParam that accumulates values into the custom Integer wrapper.

    Plain ints are coerced to Integer on the way in and on the way out, so
    the accumulator's value is always an Integer regardless of what the
    wrapper's arithmetic operators return.
    """

    def zero(self, value):
        """Return the identity element for the accumulator, wrapped as an Integer."""
        return Integer(value)

    def addInPlace(self, value1, value2):
        """Merge value2 into value1 and return the result as an Integer."""
        total = Integer(value1) if isinstance(value1, int) else value1
        total += value2
        # += may hand back a plain int depending on Integer's operator
        # implementation — normalize before returning.
        return Integer(total) if isinstance(total, int) else total

# Sum rdd1's elements on the executors via the custom accumulator.
accu = sc.accumulator(0, IntAccum())
rdd1.foreach(lambda v: accu.add(v))
# accu.value is an Integer wrapper; its .value attribute holds the raw int.
print(accu.value.value)

# NOTE(review): move this import to the top of the file (PEP 8).  The bare
# `random.choices` attribute access that followed it was a no-op and has
# been removed.
import random