from pyspark import SparkContext, SparkConf, AccumulatorParam

# --- Spark initialisation --------------------------------------------------
# Local mode with 2 worker threads; application name shown in the Spark UI.
conf = SparkConf().setMaster("local[2]").setAppName("KNN")
sc = SparkContext(conf=conf)
sc.setLogLevel("WARN")  # silence Spark's INFO-level log chatter

# Load the text file that contains the labelled points, one record per line.
# NOTE(review): path is machine-specific — parameterize before reuse.
rdd = sc.textFile("file:///Users/sonto/Workspace/Rimi/P1902/spark_example/points.txt")

# Query point, read interactively from stdin in "X,Y" form.
data = input("Please enter the point(e.g.X,Y):").split(",")
x, y = float(data[0]), float(data[1])

# Broadcast the query point so every executor receives one read-only copy
# instead of capturing it in each task closure.
pt_bd = sc.broadcast((x, y))


def load_points(line):
    """Parse one "x,y,label" text line into [((x, y), label)] for flatMap."""
    fields = line.split(",")
    point = (float(fields[0]), float(fields[1]))
    return [(point, fields[2])]


rdd = rdd.flatMap(f=load_points)


def calculate_distance(pt1, pt2):
    """Return the Euclidean distance between two 2-D points.

    Parameters are (x, y) tuples of floats. Uses math.hypot, which is
    more numerically robust than sqrt(dx**2 + dy**2) for extreme values.
    """
    # Local import kept deliberately: the function is shipped to Spark
    # executors, so it must not rely on driver-side module state.
    import math
    return math.hypot(pt1[0] - pt2[0], pt1[1] - pt2[1])


def map_distance(v):
    """Map a ((x, y), label) record to [(distance_to_query_point, label)]."""
    point, label = v
    return [(calculate_distance(point, pt_bd.value), label)]


# Number of nearest neighbours that vote on the predicted label.
K = 10

# takeOrdered(K) ships only the K smallest (distance, label) pairs to the
# driver, instead of collecting the WHOLE sorted RDD and slicing it there.
new_rdd = sc.parallelize(rdd.flatMap(map_distance).takeOrdered(K))
new_rdd = new_rdd.groupBy(lambda v: v[1])

# Majority vote: count neighbours per label and print the most frequent
# (label, count) pair.
print(new_rdd.mapValues(len).sortBy(lambda v: v[1]).collect()[-1])


class Integer:
    """Mutable int wrapper used as the value type for a Spark accumulator."""

    def __init__(self, value):
        # Running total held by this wrapper.
        self.value = value

    def __iadd__(self, other):
        # In-place add (`a += b`) — mutating self here is the documented
        # contract of __iadd__.
        self.value += other.value
        return self

    def __add__(self, other):
        # Bug fix: the original mutated self and returned it, so `a + b`
        # silently changed `a`. `+` must be side-effect free and return a
        # fresh object.
        return Integer(self.value + other.value)

    def __repr__(self):
        return f"{type(self).__name__}({self.value!r})"


class IntAccum(AccumulatorParam):
    """AccumulatorParam implementation for the Integer wrapper type."""

    def zero(self, value):
        # Identity element for addition; the incoming value is only a
        # template and is ignored.
        return Integer(0)

    def addInPlace(self, value1, value2):
        # Merge two partial sums via Integer.__add__.
        merged = value1 + value2
        return merged
