package cn.doitedu.ml.knn

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SparkSession

import scala.collection.mutable

object HandWriteDigits {

  /**
   * KNN hand-written digit recognition demo, implemented entirely with Spark SQL.
   *
   * Pipeline:
   *   1. Load labeled sample vectors and unlabeled test vectors (CSV text, first
   *      field = label / image name, remaining fields = pixel values).
   *   2. Cross-join samples x tests and compute squared Euclidean distance per pair.
   *   3. For each test image keep the k nearest samples (window function).
   *   4. Majority vote over the k neighbours' labels; ties broken by closest distance.
   *
   * @param args optional first argument: k, the number of neighbours to vote
   *             (defaults to 3, preserving the original behavior)
   */
  def main(args: Array[String]): Unit = {

    Logger.getLogger("org").setLevel(Level.WARN)

    // Number of nearest neighbours used in the vote; overridable from the CLI.
    val k = args.headOption.map(_.toInt).getOrElse(3)

    val spark = SparkSession.builder().appName("手写数字识别").master("local").getOrCreate()

    import spark.implicits._

    val sample = spark.read.textFile("userprofile/data/knndemo/sample_vec_text/sampl.txt")
    val test = spark.read.textFile("userprofile/data/knndemo/test_vec_text/sampl.txt")

    // Labeled training set: line format "label,p1,p2,..." -> (label, feature vector)
    val s = sample.map(line => {
      val split = line.split(",")
      val label = split(0)
      val featureVector = split.slice(1, split.length)
      (label, featureVector)
    }).toDF("label", "vec")

    // Unlabeled test set: line format "imageName,p1,p2,..." -> (imageName, feature vector)
    val t = test.map(line => {
      val split = line.split(",")
      val imageName = split(0)
      val featureVector = split.slice(1, split.length)
      (imageName, featureVector)
    }).toDF("imagename", "vec")

    // s.show(100,false)
    // t.show(100,false)

    s.createTempView("sample")
    t.createTempView("test")

    // Every (sample, test) pair — O(|sample| * |test|), fine for a demo-sized dataset.
    val joined = spark.sql(
      """
        |
        |select
        |sample.label,
        |sample.vec as sample_vec,
        |test.imagename,
        |test.vec as test_vec
        |
        |from sample cross join test
        |
        |""".stripMargin)

    // joined.show(100,false)
    joined.createTempView("joind")

    // Squared Euclidean distance between two equal-length string-encoded vectors.
    // Squaring is omitted from the root: it is monotonic, so neighbour ranking is
    // unchanged and we save the sqrt. Typed as Seq[String] (NOT mutable.WrappedArray)
    // so the UDF works with both Scala 2.12 (WrappedArray) and 2.13 (ArraySeq)
    // array encodings used by Spark.
    val eudist = (vec1: Seq[String], vec2: Seq[String]) => {
      vec1.map(_.toDouble)
        .zip(vec2.map(_.toDouble))
        .map { case (a, b) => Math.pow(a - b, 2) }
        .sum
    }
    spark.udf.register("eudist", eudist)

    // Distance of every test image to every training sample.
    val dis = spark.sql(
      """
        |
        |select
        |label,
        |imagename,
        |eudist(sample_vec,test_vec) as dis
        |
        |from joind
        |
        |""".stripMargin)

    dis.createTempView("dis")

    // Keep only the k nearest training samples per test image.
    val neiber = spark.sql(
      s"""
        |
        |select
        |label,imagename,dis
        |from
        |(
        |select
        |label,
        |imagename,
        |dis,
        |row_number() over(partition by imagename order by dis ) as rn
        |from dis
        |) o
        |where rn<=$k
        |
        |""".stripMargin)
    // .show(100,false)

    /**
     * +-----+---------+-----+
     * |label|imagename|dis  |
     * +-----+---------+-----+
     * |6    |b.png    |132.0|
     * |6    |b.png    |179.0|
     * |7    |b.png    |238.0|
     * |7    |a.png    |67.0 |
     * |7    |a.png    |71.0 |
     * |7    |a.png    |140.0|
     * +-----+---------+-----+
     */

    neiber.createTempView("neiber")

    // Vote count per (label, image); min(dis) is carried along as the tie-breaker
    // for the final ranking below.
    val cnt = spark.sql(
      """
        |
        |select
        |label,imagename,count(1) as cnt,min(dis) as min_dis
        |from neiber
        |group by label,imagename
        |
        |
        |""".stripMargin)
    // .show(100,false)

    /**
     * +-----+---------+---+
     * |label|imagename|cnt|
     * +-----+---------+---+
     * |6    |b.png    |2  |
     * |7    |b.png    |1  |
     * |7    |a.png    |3  |
     * +-----+---------+---+
     */

    cnt.createTempView("cnt")

    // Final prediction: label with the most votes; when vote counts tie, prefer the
    // label whose nearest neighbour is closest (otherwise row_number() would pick
    // an arbitrary, nondeterministic winner).
    spark.sql(
      """
        |
        |select
        |imagename,label as predict
        |
        |from
        |(
        |select
        |imagename,
        |label,
        |row_number() over(partition by imagename order by cnt desc, min_dis asc)  as rn
        |from cnt
        |) o
        |where rn=1
        |
        |""".stripMargin)
      .show(100, false)

    spark.close()

  }

}
