import org.apache.spark.ml.clustering.KMeansModel
import org.apache.spark.ml.linalg.{DenseVector, Vector, Vectors}
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.functions.{avg, col, collect_set, concat_ws, dense_rank, lit, when}
/**
 * Computes an (approximate) silhouette coefficient for a fitted KMeans model.
 *
 * For every cluster c this derives:
 *   a(c) — mean Euclidean distance between c's center and the points assigned to c
 *          ("avg_interiorDifferent"), and
 *   b(c) — mean Euclidean distance between c's center and the points of the single
 *          nearest other cluster ("avg_different").
 * The per-cluster silhouette is (b - a) / max(a, b); `avg_Profile` holds its mean
 * over all clusters. NOTE(review): this is a center-based approximation of the
 * classic point-wise silhouette score, not the exact definition.
 *
 * @param model   fitted KMeansModel whose `clusterCenters` are used
 * @param predict the model's transform output; must contain a Vector column
 *                `features` and an integer column `prediction`
 */
class Profile_coefficient_gain(model: KMeansModel, predict: DataFrame) {
  // Reuse the session that produced `predict`. Building a fresh session with
  // master("local[*]") here would silently override the caller's master/config
  // when this class is used on a real cluster.
  val spark = predict.sparkSession

  // Euclidean distance between two vectors (sqdist returns the SQUARED distance).
  val ou_distance = spark.udf.register("Ou_distance", (v1: Vector, v2: Vector) => {
    math.sqrt(Vectors.sqdist(v1, v2))
  })

  // All cluster centers, indexed by prediction id.
  val clusterCenters = model.clusterCenters

  // Parses the b-th center back out of the ";"-joined string form of the centers.
  // NOTE(review): relies on DenseVector.toString rendering as "[x,y,...]"; KMeans
  // centers are dense so this holds, but it would break for sparse vectors.
  val catchVector = spark.udf.register("catchVector", (a: String, b: Int) => {
    Vectors.dense(a.split(";")(b).replace("[", "").replace("]", "").split(",").map(_.toDouble))
  })

  // Attach each row's own cluster center and the distance to it (the "a" side),
  // then add one "center#i" column per center so any row can later be measured
  // against any other cluster's center. foldLeft replaces the original
  // reassignment loop; the member is still a public var for compatibility.
  var CenterData = clusterCenters.indices.foldLeft(
    predict.withColumn("centerVectorTest", lit(clusterCenters.toSeq.mkString(";")))
      .withColumn("centerVector", catchVector(col("centerVectorTest"), col("prediction")))
      .withColumn("interiorDifferent", ou_distance(col("features"), col("centerVector")))
  )((df, i) => df.withColumn("center#" + i, catchVector(col("centerVectorTest"), lit(i))))

  // One row per cluster: (prediction, centerVector). The second copy renames its
  // columns so the self cross-join below has no ambiguous references. (The
  // original sorted before distinct — a no-op, since distinct reshuffles.)
  val center_collect = CenterData.select("prediction", "centerVector").distinct()
  val center_collect1 = CenterData.select(col("prediction").as("prediction1"), col("centerVector").as("centerVector1")).distinct()

  // Cartesian product of centers -> for every center keep the nearest OTHER
  // center (distance 0 is the center paired with itself). crossJoin is explicit
  // so this works regardless of spark.sql.crossJoin.enabled.
  // NOTE(review): dense_rank keeps ties at rank 1, so two exactly equidistant
  // centers would yield more than one "nearest" row for a prediction.
  val near_center = center_collect.crossJoin(center_collect1)
    .withColumn("distance", ou_distance(col("centerVector"), col("centerVector1")))
    .where("distance != 0")
    .select("prediction", "centerVector", "prediction1", "distance")
    .withColumn("sort", dense_rank().over(Window.partitionBy("prediction").orderBy("distance")))
    .where("sort = 1")
    .withColumn("center_predict_duiyin", concat_ws("-", col("prediction"), col("prediction1")))

  // "center_predict_duiyin" is "<cluster>-<nearest cluster>"; e.g. "1-0" means
  // cluster 1's center must be measured against all points of cluster 0.
  // For k = 3 this might look like [1-0, 0-2, 2-0].

  // Point-level data joined with that cluster's nearest-center pairing.
  val join_data = near_center.join(CenterData, "prediction").orderBy("prediction")

  // Distinct "<cluster>-<nearest>" pairs, one per cluster.
  val duiyin_all = join_data.select(collect_set("center_predict_duiyin")).head()

  // For each pair "c-n": average distance from center c to the points of cluster
  // n (the "b" side of the silhouette), accumulated by union. The seed is an
  // EMPTY frame with the correct (string, double) schema: where(lit(false))
  // replaces the magic "prediction = 10000" filter, and lit(0.0) avoids the
  // original lit("") seed, which made Spark's set-operation type widening turn
  // avg_different into a string column downstream.
  var diff = join_data.where(lit(false))
    .withColumn("center", lit(""))
    .withColumn("avg_different", lit(0.0))
    .select("center", "avg_different")
  duiyin_all.getSeq[String](0).sorted.foreach(pair => {
    val Array(center, nearest) = pair.split("-")
    diff = join_data.where(s"prediction = $nearest")
      .select("prediction", "features", s"center#$center")
      .withColumn("different_long", ou_distance(col("features"), col(s"center#$center")))
      .withColumn("center", lit(center))
      .groupBy("center").agg(avg("different_long").as("avg_different"))
      .select("center", "avg_different")
      .union(diff)
  })

  val distance_different = diff

  // Join per-cluster exterior distance back onto the per-point data, then
  // average the interior distances per cluster. (prediction is int and center is
  // string; Spark casts for the equality comparison.)
  val distance_all = join_data.join(distance_different, col("prediction") === distance_different.col("center"))
    .select("prediction", "interiorDifferent", "avg_different")
    .groupBy("prediction", "avg_different").agg(avg("interiorDifferent").as("avg_interiorDifferent"))
    .select("prediction", "avg_interiorDifferent", "avg_different")

  // Silhouette per cluster: (b - a) / max(a, b).
  val all_profile = distance_all
    .withColumn("max_different"
      , when(col("avg_different") > col("avg_interiorDifferent"), col("avg_different")).otherwise(col("avg_interiorDifferent")))
    .withColumn("profile_coefficient", (col("avg_different") - col("avg_interiorDifferent")) / col("max_different"))

  // Mean silhouette over all clusters — the overall model score.
  val avg_Profile = all_profile.select(avg("profile_coefficient").as("avg_profile"))

}
