package kk.learn.spark.work._5

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

/**
 * <p>
 * A simple hand-rolled k-means clustering of the Iris data set using
 * Spark RDDs: each record is assigned to the nearest of three centers,
 * the centers are recomputed as the mean of their assigned records, and
 * the loop repeats until the centers stop moving (or 200 iterations).
 * </p>
 *
 * @author KK
 * @since 2021-03-05
 */

// One iris flower record from the data set.
// `Computing` holds the label of the cluster center ("c0".."c2") the record
// is currently assigned to; it is a `var` because the k-means loop in
// `KMeans.main` rewrites it on every iteration. The remaining fields mirror
// the CSV columns of the Kaggle Iris data set.
case class Iris(Id: Int,
                SepalLengthCm: Double, SepalWidthCm: Double,
                PetalLengthCm: Double, PetalWidthCm: Double,
                Species: String, var Computing: String
               )

object KMeans {

  /**
   * Euclidean distance between two iris records over the four
   * measurement dimensions (sepal/petal length and width).
   *
   * @param i1 first record
   * @param i2 second record
   * @return the Euclidean distance between them
   */
  def dist(i1: Iris, i2: Iris): Double = {
    Math.sqrt(
      Math.pow(i1.SepalLengthCm - i2.SepalLengthCm, 2)
        + Math.pow(i1.SepalWidthCm - i2.SepalWidthCm, 2)
        + Math.pow(i1.PetalLengthCm - i2.PetalLengthCm, 2)
        + Math.pow(i1.PetalWidthCm - i2.PetalWidthCm, 2)
    )
  }

  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName("KMeans")
      .master("local[*]")
      .getOrCreate()
    val sc = spark.sparkContext
    sc.setLogLevel("warn")

    // Read the CSV into an RDD of Iris records, one per row.
    val iris = spark.read.format("csv")
      .option("inferSchema", "true")
      .option("header", "true")
      .load("code/data/Iris.csv")
      .rdd
      .map(row =>
        Iris(row.getInt(0),
          row.getDouble(1),
          row.getDouble(2),
          row.getDouble(3),
          row.getDouble(4),
          row.getString(5),
          ""))

    // Three random initial centers labelled "c0".."c2". They are sorted by
    // label so the convergence comparison below can pair old and new
    // centers positionally.
    var centers = (for (i <- 0 until 3) yield Iris(i,
      3 + 5 * Math.random(),
      2 + 2 * Math.random(),
      6 * Math.random(),
      3 * Math.random(),
      "",
      "c" + i)).sortBy(_.Computing).toArray

    var count = 0
    var converged = false
    // Initialized to the unlabelled data (instead of null) so the variable
    // is always a valid RDD; the loop below always runs at least once.
    var irisWithComputing: RDD[Iris] = iris
    while (!converged && count < 200) {
      count += 1

      // Capture the current centers in a val so the Spark closure does not
      // close over the mutable `centers` variable.
      val currentCenters = centers

      // Assign each record to its nearest center. Use copy() rather than
      // mutating the shared case-class instance inside the closure.
      irisWithComputing = iris.map { i =>
        val nearest = currentCenters.minBy(center => dist(i, center))
        i.copy(Computing = nearest.Computing)
      }

      // Recompute each center as the mean of its assigned points.
      // reduceByKey folds (sums, count) in a single pass, avoiding the
      // groupByKey shuffle that ships every record of a cluster to one
      // task and the five separate traversals of each group.
      val meansByLabel = irisWithComputing
        .map(i => (i.Computing,
          (i.SepalLengthCm, i.SepalWidthCm, i.PetalLengthCm, i.PetalWidthCm, 1L)))
        .reduceByKey((a, b) =>
          (a._1 + b._1, a._2 + b._2, a._3 + b._3, a._4 + b._4, a._5 + b._5))
        .collect()
        .map { case (label, (sl, sw, pl, pw, n)) =>
          label -> Iris(-1, sl / n, sw / n, pl / n, pw / n, "", label)
        }
        .toMap

      // Pair new centers with old ones by label. A center that received no
      // points keeps its previous position — the original positional
      // indexing threw ArrayIndexOutOfBoundsException whenever a cluster
      // emptied out and newCenters came back shorter than centers.
      val newCenters = centers.map(c => meansByLabel.getOrElse(c.Computing, c))

      // Converged when the total movement of all centers is negligible.
      val totalDist = centers.indices.map(j => dist(centers(j), newCenters(j))).sum
      if (totalDist < 0.1) {
        converged = true
      }
      centers = newCenters
    }

    // Print the final assignment. foreach runs on the executors in a real
    // cluster; with master local[*] the output appears in this console.
    irisWithComputing.foreach(println(_))

    spark.close()
  }
}
