# encoding=utf-8
import math
from pyspark import SparkContext


# Interactive input, kept for reference; values are hard-coded below for batch runs.
# K = int(input("Input K:"))
# P = input("Point(e.g x,y):").split(",")
K = 5  # number of nearest neighbours that vote on the label
P = 50,0.1  # query point as (x, y); converted to floats for the broadcast below

sc = SparkContext(appName="kNN")
# NOTE(review): Spark docs list log levels upper-case ("WARN") -- confirm this
# Spark version accepts the lowercase form.
sc.setLogLevel("warn")
# NOTE(review): hard-coded absolute local path; this only runs on the author's machine.
rdd = sc.textFile("file:///Users/liuqi/PycharmProjects/P1905/liuqi/spark/points-data.txt")
# Broadcast the query point once so every executor shares a read-only copy.
point = sc.broadcast((float(P[0]), float(P[1])))


# Parse one CSV record into the ((x, y), label) shape used by the pipeline.
def map_line(data):
    """Convert a text line "x,y,label" into a ((float, float), str) pair."""
    x_text, y_text, label = data.split(",")
    coords = (float(x_text), float(y_text))
    return (coords, label)


# Euclidean distance between two 2-D points.
def distance(p1, p2):
    """Return the Euclidean distance between *p1* and *p2*.

    Each point is an (x, y) pair of numbers.
    """
    # math.hypot is clearer than sqrt(pow(..) + pow(..)) and numerically
    # safer: it avoids overflow/underflow in the intermediate squares.
    return math.hypot(p2[0] - p1[0], p2[1] - p1[1])


# Key each record by its distance to the broadcast query point.
def map_to_distance(pt):
    """Return (distance_to_query, pt) so records can be ordered by distance."""
    return (distance(pt[0], point.value), pt)


# Attach each record's distance to the query point, then fetch only the K
# nearest records with takeOrdered().  This replaces the original pattern of a
# full sortByKey() shuffle followed by collect()ing the ENTIRE dataset to the
# driver just to slice off the first K elements (and re-parallelizing them).
rdd2 = rdd.map(map_line).map(map_to_distance)
nearest = rdd2.takeOrdered(K, key=lambda rec: rec[0])  # K smallest distances

# Majority vote over the K neighbours' labels.  K is tiny, so counting locally
# on the driver is cheaper than the original parallelize + groupByKey shuffle.
votes = {}
for _dist, (_coords, label) in nearest:
    votes[label] = votes.get(label, 0) + 1
best_label = max(votes, key=votes.get)

print("The point {} is labeled {}".format(point.value, best_label))
