package com.lagou.spark

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Row, SparkSession}

/*
 Assume k = 9.
 File A (data/Iris.csv) contains 141 labelled samples; file B (data/Iristest.csv) contains 9 unlabelled samples.
 Algorithm: 1. Read files A and B into datasets X and Y.
            2. Compute the Euclidean distance from every point in Y to every point in X, forming dataset D.
            3. For each point in Y, find the K smallest distances in D.
            4. Determine the class of each point in Y from the labels of those K nearest points.
 */

object KnnDemo {
    def main(args: Array[String]): Unit = {
      // k (number of nearest neighbours) may be passed as the first program
      // argument; defaults to 9, preserving the original behaviour.
      val k = if (args.nonEmpty) args(0).toInt else 9

      // Create the SparkSession (local mode, all cores).
      val spark = SparkSession.builder()
        .appName(this.getClass.getCanonicalName)
        .master("local[*]")
        .getOrCreate()
      spark.sparkContext.setLogLevel("warn")

      // Read file A (labelled samples) into dataset X.
      // Expected row layout: (id: Int, f1..f4: Double, label: String).
      val X: DataFrame = spark.read
        .option("header", "true")
        .option("delimiter", ",")
        .option("inferSchema", "true")
        .csv("data/Iris.csv")
      // Read file B (unlabelled samples) into dataset Y.
      // Expected row layout: (id: Int, f1..f4: Double).
      val Y: DataFrame = spark.read
        .option("header", "true")
        .option("delimiter", ",")
        .option("inferSchema", "true")
        .csv("data/Iristest.csv")

      // Cartesian product: pair every labelled sample with every unknown sample.
      val pairs: RDD[(Row, Row)] = X.rdd.cartesian(Y.rdd)

      // Euclidean distance for each (labelled, unknown) pair, keyed by the id
      // of the unknown sample so that distances can be grouped per query point.
      val D: RDD[(Int, (String, Double))] = pairs.map {
        case (x, y) =>
          val xLabel = x.getString(5)
          // Sum of squared differences over the four feature columns (1..4).
          val sumSq = (1 to 4)
            .map(i => math.pow(x.getDouble(i) - y.getDouble(i), 2))
            .sum
          val distance = math.sqrt(sumSq)
          (y.getInt(0), (xLabel, distance))
      }

      // For each unknown sample: keep its k nearest neighbours and take the
      // majority label among them.
      // BUG FIX: the original code took the first key produced by groupBy
      // (arbitrary Map ordering) instead of the most frequent label, so the
      // predicted class was effectively random among the k labels present.
      val result: RDD[(Int, String)] = D.groupByKey().mapValues { neighbours =>
        val nearest: List[(String, Double)] = neighbours.toList.sortBy(_._2).take(k)
        nearest
          .groupBy(_._1)                               // label -> occurrences among the k nearest
          .map { case (label, hits) => (label, hits.size) }
          .maxBy(_._2)                                 // majority vote
          ._1
      }

      // collect() first so the output is printed on the driver; a bare
      // RDD.foreach(println) would print on the executors in cluster mode.
      result.collect().foreach { case (id, label) => println(s"$id\t$label") }

      // Release Spark resources.
      spark.close()
    }
}
