package cn.doitedu.profile.ml.knn

import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}

import scala.collection.mutable

/**
 * @author 涛哥
 * @nick_name "deep as the sea"
 * @contact qq:657270652 wx:doit_edu
 * @site www.doitedu.cn
 * @date 2021-06-23
 * @desc Handwritten digit recognition via KNN over flattened pixel vectors
 */
object KnnDigitRecognize {

  /** Number of nearest neighbours that vote on each test instance. */
  private val K = 3

  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      // tiny local demo — one shuffle partition keeps output readable
      .config("spark.sql.shuffle.partitions", "1")
      .appName("KnnDigitRecognize")
      .master("local")
      .getOrCreate()

    import spark.implicits._

    // Load the training set. Each line: "<label>_<p1,p2,...,pn>" where the
    // p's are the flattened pixel values of one labelled digit image.
    val sampleText = spark.read.textFile("profile/data/knn/samples_vec/samples_vec.txt")
    val sampleVec: DataFrame = sampleText.map(line => {
      val arr = line.split("_")
      (arr(0), arr(1).split(","))
    }).toDF("label", "features")

    // Load the test set. Each line: "<name>_<p1,p2,...,pn>" — same vector
    // format, but keyed by an instance name instead of a known label.
    val testText = spark.read.textFile("profile/data/knn/test_vec/test_vec.txt")
    val testVec: DataFrame = testText.map(line => {
      val arr = line.split("_")
      (arr(0), arr(1).split(","))
    }).toDF("name", "features")

    // createOrReplaceTempView: safe to re-run in a long-lived session
    sampleVec.createOrReplaceTempView("sample")
    testVec.createOrReplaceTempView("test")

    // Squared Euclidean distance between two string-encoded vectors.
    // The sqrt is deliberately omitted: it is monotonic, so the neighbour
    // ranking (all we need for KNN) is unchanged and we save the call.
    val eudist = (vec1: mutable.WrappedArray[String], vec2: mutable.WrappedArray[String]) => {
      val v1 = vec1.map(_.toDouble)
      val v2 = vec2.map(_.toDouble)
      v1.zip(v2).map(tp => math.pow(tp._1 - tp._2, 2)).sum
    }
    spark.udf.register("eudist", eudist)

    // Cross join: distance from every test instance to every training sample.
    // Schema: (name, label, dist)
    val dist = spark.sql(
      """
        |select
        |  test.name,
        |  sample.label,
        |  eudist(test.features, sample.features) as dist
        |from test cross join sample
        |""".stripMargin)

    dist.createOrReplaceTempView("dist")

    // Keep only the K nearest training samples per test instance.
    // dist is carried through so the final vote can tie-break on it.
    spark.sql(
      s"""
         |select
         |  name,
         |  label,
         |  dist
         |from
         |(
         |  select
         |    name,
         |    label,
         |    dist,
         |    row_number() over(partition by name order by dist) as rn
         |  from dist
         |)
         |where rn <= $K
         |""".stripMargin).createOrReplaceTempView("topk")

    /**
     * topk example (K = 3):
     * +----+-----+-----+
     * |name|label|dist |
     * +----+-----+-----+
     * |a   |7    | 67.0|
     * |a   |7    | 71.0|
     * |a   |7    |140.0|
     * |b   |6    |132.0|
     * |b   |6    |179.0|
     * |b   |7    |238.0|
     * +----+-----+-----+
     */

    // Majority vote among the K nearest neighbours.
    // NOTE: the previous `having count(1) >= 2` dropped any test instance
    // whose K neighbours all had distinct labels, producing no prediction.
    // Instead, rank each candidate label by vote count (ties broken by the
    // smallest distance) and always emit exactly one label per instance.
    val res = spark.sql(
      """
        |select
        |  name,
        |  label
        |from
        |(
        |  select
        |    name,
        |    label,
        |    row_number() over(partition by name order by cnt desc, min_dist asc) as rk
        |  from
        |  (
        |    select
        |      name,
        |      label,
        |      count(1) as cnt,
        |      min(dist) as min_dist
        |    from topk
        |    group by name, label
        |  )
        |)
        |where rk = 1
        |""".stripMargin)

    res.show(100, false)

    spark.close()
  }
}
