package demo

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.types.{DataTypes, StructType}
import org.apache.spark.sql.{DataFrame, SparkSession}
import util.SparkUtil

import scala.collection.mutable

/**
 * KNN (k-nearest-neighbours) classification demo.
 *
 * Classifies each test record by majority vote among the k = 3 training
 * samples closest to it in Euclidean distance over five integer features.
 * Labels are assumed binary (0/1), so `sum(label) > 1` means at least
 * 2 of the 3 neighbours are positive.
 */
object KNNDemoBMS {

  def main(args: Array[String]): Unit = {
    Logger.getLogger("org").setLevel(Level.WARN)
    val spark: SparkSession = SparkUtil.getSparkSession(this.getClass.getSimpleName)

    // Training-set schema: binary label plus five integer features.
    val sampleSchema: StructType = new StructType()
      .add("label", DataTypes.IntegerType)
      .add("f1", DataTypes.IntegerType)
      .add("f2", DataTypes.IntegerType)
      .add("f3", DataTypes.IntegerType)
      .add("f4", DataTypes.IntegerType)
      .add("f5", DataTypes.IntegerType)
    val sample: DataFrame = spark.read
      .schema(sampleSchema)
      .option("header", value = true)
      .csv("userprofile/data/KNN/sample.csv")

    // Test-set schema: record id plus the same five features.
    val testSchema: StructType = new StructType()
      .add("id", DataTypes.IntegerType)
      .add("f1", DataTypes.IntegerType)
      .add("f2", DataTypes.IntegerType)
      .add("f3", DataTypes.IntegerType)
      .add("f4", DataTypes.IntegerType)
      .add("f5", DataTypes.IntegerType)
    val test: DataFrame = spark.read
      .option("header", value = true)
      .schema(testSchema)
      .csv("userprofile/data/KNN/test.csv")

    sample.createTempView("sample")
    test.createTempView("test")
    spark.udf.register("Euclid", Euclid _)

    // Score every (test, sample) pair, then keep the 3 NEAREST neighbours
    // per test id. The Euclid UDF returns 1/(distance+1), i.e. LARGER values
    // mean CLOSER samples, so the window must order DESC to rank nearest
    // first. (Ordering ascending here was a bug: it kept the 3 farthest
    // training samples instead of the 3 nearest.)
    val tmpDF = spark.sql(
      """
        |with pairs as (
        |select id, label,
        |Euclid(array(a.f1,a.f2,a.f3,a.f4,a.f5), array(b.f1,b.f2,b.f3,b.f4,b.f5)) as Euclid
        |from test a cross join sample b )
        |select id, label from(
        |select id, label, Euclid,
        |row_number() over(partition by id order by Euclid desc) as rn
        |from pairs) o where rn <= 3
        |""".stripMargin)
    tmpDF.createTempView("tmp")

    // Majority vote over the 3 neighbours: sum(label) > 1 means at least
    // two of the three are labelled 1. (CTE renamed from "tmp" to "agg" so
    // it no longer shadows the temp view of the same name.)
    spark.sql(
      """
        |with agg as (select
        |id, sum(label) as res
        |from tmp group by id)
        |select id,
        |if(res>1, 1, 0) as label
        |from agg order by id
        |""".stripMargin).show(100)

    spark.close()
  }

  /**
   * Similarity derived from the Euclidean distance between two equal-length
   * integer feature vectors: 1 / (distance + 1).
   *
   * Range is (0, 1]; identical vectors yield 1.0 and larger values mean
   * closer vectors — consumers must rank DESCENDING to get nearest-first.
   *
   * @param feature1 first feature vector
   * @param feature2 second feature vector (assumed same length as feature1)
   * @return similarity score in (0, 1]
   */
  def Euclid(feature1: mutable.WrappedArray[Int], feature2: mutable.WrappedArray[Int]): Double = {
    val squaredDiffs = feature1.zip(feature2).map { case (a, b) =>
      val d = (b - a).toDouble
      d * d
    }
    1.0 / (math.sqrt(squaredDiffs.sum) + 1.0)
  }
}
