package com.lagou

import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

import scala.math.{pow, sqrt}

object WorkFive {
  /**
   * Entry point: runs a hand-rolled K-means (k = 3) over the iris feature file
   * and prints the final cluster centers. Iterates until the total distance the
   * centers move in one pass drops below a small threshold.
   */
  def main(args: Array[String]): Unit = {
    Logger.getLogger("org").setLevel(Level.WARN)
    val spark: SparkSession = SparkSession.builder()
      .appName(s"${this.getClass.getCanonicalName}")
      .master("local[*]")
      .getOrCreate()
    val sc: SparkContext = spark.sparkContext

    // Read the sample file, keeping only the numeric feature columns.
    // NOTE(review): `init.tail` drops BOTH the first and the last field of each
    // row — presumably an id column in front and a label column at the end.
    // Confirm against the actual CSV layout; dropping only the label would be
    // `fields.init` instead.
    val sample: RDD[Array[Double]] = sc.textFile("data/source4/IrisKmeans.csv").map {
      line =>
        val fields = line.split(",")
        fields.init.tail.map(_.toDouble)
    }
    sample.cache()

    // Number of clusters (was hard-coded as the literal 3 in two places).
    val k = 3
    // Pick k distinct random sample points as the initial centers.
    val centerPoints: Array[Array[Double]] = sample.takeSample(withReplacement = false, k)
    // Converged when the centers move less than this in total between passes.
    val minDist = 0.001
    // Total distance moved in the latest pass (seeded above the threshold).
    var currentDist = 1.0
    while (currentDist > minDist) {
      // Assign each point to its nearest center: (clusterIndex, (features, 1.0)).
      // The trailing 1.0 is a count, summed below to average the features.
      val indexRDD: RDD[(Int, (Array[Double], Double))] =
        sample.map(p => (getIndex(p, centerPoints), (p, 1.0)))

      // Element-wise sum of two coordinate vectors.
      def arrayAdd(x: Array[Double], y: Array[Double]): Array[Double] =
        x.zip(y).map { case (a, b) => a + b }

      // Per cluster: component-wise sum of member vectors plus the member count.
      val catalogRDD: RDD[(Int, (Array[Double], Double))] =
        indexRDD.reduceByKey((x, y) => (arrayAdd(x._1, y._1), x._2 + y._2))

      // New center of each cluster = component-wise mean of its members.
      val newCenterPoints: collection.Map[Int, Array[Double]] =
        catalogRDD.map { case (index, (point, count)) => (index, point.map(_ / count)) }
          .collectAsMap()

      // Total distance the centers moved this pass.
      // BUG FIX: a cluster that lost all of its points is absent from
      // newCenterPoints; the old `newCenterPoints(i)` then threw
      // NoSuchElementException. An empty cluster now keeps its previous
      // center (contributing distance 0).
      currentDist = centerPoints.indices.map { i =>
        getDistance(centerPoints(i), newCenterPoints.getOrElse(i, centerPoints(i)))
      }.sum

      // Move the centers to their newly computed positions.
      for ((key, value) <- newCenterPoints) {
        centerPoints(key) = value
      }
      println(s"currentDistSum = $currentDist")
    }

    // Print the final cluster centers.
    println("Final centers:")
    centerPoints.foreach(x => println(x.toBuffer))

    spark.close()
  }

  // 计算任意两点间距离
  /** Euclidean (L2) distance between two points of equal dimension. */
  private def getDistance(x: Array[Double], y: Array[Double]): Double = {
    val squaredDiffs = x.indices.map(i => pow(x(i) - y(i), 2))
    sqrt(squaredDiffs.sum)
  }

  // 计算某个点与哪一个中心点更近
  /**
   * Index of the center nearest to point `p`.
   * On a tie the lowest index wins (same behavior as `indexOf(dist.min)`,
   * since `minBy` returns the first minimum).
   */
  private def getIndex(p: Array[Double], centers: Array[Array[Double]]): Int =
    centers.indices.minBy(i => getDistance(centers(i), p))
}
