package com.jhhe.homework4_2

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import scala.util.Random

object KMeans {
  /** One Iris observation (or a cluster centroid) in 4-D feature space. */
  case class Point(SepalLengthCm: Double, SepalWidthCm: Double, PetalLengthCm: Double, PetalWidthCm: Double) extends Serializable

  // The three current cluster centroids, initialised at random.
  // NOTE(review): uniform random points in [0, 10)^4 may fall far from the
  // data; seeding from three sampled data rows would usually converge faster.
  var A: (String, Point) = randomPoint("A")
  var B: (String, Point) = randomPoint("B")
  var C: (String, Point) = randomPoint("C")

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[*]").setAppName("KMeans2")
    val sc = new SparkContext(conf)
    sc.setLogLevel("warn")

    // Centroids from the previous iteration, kept to measure convergence.
    var a: (String, Point) = A
    var b: (String, Point) = B
    var c: (String, Point) = C

    // Iteration counter.
    var count = 0
    // Total distance the three centroids moved in the last iteration.
    var focusDisSum = Double.MaxValue

    // Stop once the centroids have (almost) stopped moving, or after `times` iterations.
    def endLoop(times: Int, scale: Double) = focusDisSum < scale || count > times

    // Parse the data set once; cache() because the RDD is re-scanned on every
    // iteration of the convergence loop (the original re-read the file each time).
    val points: RDD[Point] = sc.textFile("data/Iris.csv").map(parseData).filter(_ != null).cache()

    var sortRdd: RDD[(String, Point)] = null
    // Iterate until convergence.
    while (!endLoop(Integer.MAX_VALUE, 0.00001d)) {
      // Assign every point to its nearest centroid (A, B or C).
      sortRdd = points.map { point =>
        val d1 = distanceFocus(point, A)
        val d2 = distanceFocus(point, B)
        val d3 = distanceFocus(point, C)
        // Pick the name of the closest centroid.
        val pointName = List(d1, d2, d3).minBy(_._2)._1
        (pointName, point)
      }

      // Recompute each cluster's centroid as the mean of its member points.
      val newFocus: RDD[(String, Point)] = sortRdd.groupByKey().mapValues(getNewFocus)

      // BUG FIX: the original ran `newFocus.foreach { ... A = ... }`, mutating
      // the driver-side vars inside an RDD action. That closure executes on
      // executors, so on a real cluster A/B/C would never be updated (it only
      // appeared to work in local mode, where driver and executors share a
      // JVM). collect() brings the (at most 3) centroids to the driver first.
      val updatedFocus = newFocus.collect()
      println(updatedFocus.length) // progress/debug output (was newFocus.count())

      // Remember the previous centroids before overwriting them.
      a = A
      b = B
      c = C
      updatedFocus.foreach { case (name, point) =>
        name match {
          case "A" => A = (name, point)
          case "B" => B = (name, point)
          case "C" => C = (name, point)
        }
      }

      // Total distance the centroids moved this iteration (convergence metric).
      focusDisSum = distance(A._2, a._2) + distance(B._2, b._2) + distance(C._2, c._2)

      count += 1
    }

    println(s"循环次数：$count , 新中心点与原中心点距离总和：$focusDisSum")

    // collect() so the final assignments are printed on the driver; an RDD
    // foreach would print on the executors instead.
    sortRdd.sortByKey().collect().foreach(x =>
      println(s"${x._1}, ${x._2.SepalLengthCm},${x._2.SepalWidthCm},${x._2.PetalLengthCm},${x._2.PetalWidthCm}")
    )

    sc.stop()
  }

  /**
   * Computes a new centroid as the per-dimension mean of a cluster's points.
   *
   * @param iterable the points currently assigned to one cluster; non-empty,
   *                 since groupByKey only emits keys that have values
   * @return the mean point, rounded to 8 decimal places
   */
  def getNewFocus(iterable: Iterable[Point]): Point = {
    var sepalLengthCmSum = 0d
    var sepalWidthCmSum = 0d
    var petalLengthCmSum = 0d
    var petalWidthCmSum = 0d
    for (point <- iterable) {
      sepalLengthCmSum += point.SepalLengthCm
      sepalWidthCmSum += point.SepalWidthCm
      petalLengthCmSum += point.PetalLengthCm
      petalWidthCmSum += point.PetalWidthCm
    }
    val size = iterable.size
    import java.text.{DecimalFormat, DecimalFormatSymbols}
    import java.util.Locale
    // BUG FIX: DecimalFormat uses the JVM default locale; in locales with a
    // comma decimal separator, format(...) would yield e.g. "1,23" and
    // .toDouble would throw. Pin the symbols to Locale.ROOT ('.' separator).
    val df = new DecimalFormat("0.00000000", DecimalFormatSymbols.getInstance(Locale.ROOT))
    // Average each dimension across all points in the cluster.
    Point(df.format(sepalLengthCmSum / size).toDouble,
      df.format(sepalWidthCmSum / size).toDouble,
      df.format(petalLengthCmSum / size).toDouble,
      df.format(petalWidthCmSum / size).toDouble)
  }

  /**
   * Parses one line of Iris.csv into a Point.
   *
   * Expected layout: Id,SepalLengthCm,SepalWidthCm,PetalLengthCm,PetalWidthCm,...
   * (column 0, the row id, is skipped).
   *
   * @param line one raw CSV line
   * @return the parsed Point, or null for unparseable lines (e.g. the CSV
   *         header row); callers filter the nulls out
   */
  def parseData(line: String): Point = {
    try {
      val fields = line.split(",")
      Point(fields(1).toDouble, fields(2).toDouble, fields(3).toDouble, fields(4).toDouble)
    } catch {
      // NonFatal: swallow parse failures (bad row / header) but let fatal
      // errors such as OutOfMemoryError propagate.
      case scala.util.control.NonFatal(_) => null
    }
  }

  /**
   * Creates a named random centroid with each coordinate uniform in [0, 10).
   *
   * @param pointName the centroid's label ("A", "B" or "C")
   * @return (label, random point)
   */
  def randomPoint(pointName: String): (String, Point) =
    (pointName, Point(Random.nextDouble() * 10, Random.nextDouble() * 10, Random.nextDouble() * 10, Random.nextDouble() * 10))

  /**
   * Computes the distance from a point to a named centroid.
   *
   * @param p1    the data point
   * @param focus (centroid name, centroid point)
   * @return (centroid name, Euclidean distance)
   */
  def distanceFocus(p1: Point, focus: (String, Point)): (String, Double) = {
    val dis = distance(p1, focus._2)
    (focus._1, dis)
  }

  /**
   * Euclidean distance between two points in the 4-D feature space.
   *
   * @param p1 first point
   * @param p2 second point
   * @return the straight-line distance
   */
  def distance(p1: Point, p2: Point): Double = {
    Math.sqrt(
      Math.pow(p1.PetalLengthCm - p2.PetalLengthCm, 2) +
        Math.pow(p1.PetalWidthCm - p2.PetalWidthCm, 2) +
        Math.pow(p1.SepalLengthCm - p2.SepalLengthCm, 2) +
        Math.pow(p1.SepalWidthCm - p2.SepalWidthCm, 2)
    )
  }
}
