package cn.doitedu.spark

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.sql.types.{DataTypes, StructField, StructType}

import scala.collection.{immutable, mutable}

/**
 * @author 涛哥
 * @nick_name "deep as the sea"
 * @contact qq:657270652 wx:doit_edu
 * @site www.doitedu.cn
 * @date 2021-01-16
 * @desc 归因分析
 */
object AttributeDemo {

  /**
   * Attribution analysis demo.
   *
   * Reads per-user event logs from CSV and, for each user (guid), credits the
   * configured target events using four attribution strategies, each
   * registered as a Spark SQL UDF:
   *  - fir:    first-touch  (earliest matching event gets all credit)
   *  - lst:    last-touch   (latest matching event gets all credit)
   *  - linear: equal split of 100 among the distinct matching events
   *  - timed:  recency-weighted split (more recent events weigh more)
   */
  def main(args: Array[String]): Unit = {

    Logger.getLogger("org").setLevel(Level.WARN)

    val spark = SparkSession.builder()
      .appName("AttributeDemo")
      .master("local")
      .getOrCreate()

    // Columns: user id, event id, event properties, timestamp (numeric string).
    val df = spark.read.csv("testdata/attribute/input/attr.csv")
      .toDF("guid", "eventid", "props", "ts")
    df.createTempView("df")

    // Events eligible for first/last-touch credit.
    val touchEvents = Set("e1", "e3")
    // Events eligible for linear / time-decay credit.
    // NOTE(review): differs from touchEvents (adds "e6") — kept as in the
    // original code; confirm whether the two sets are meant to diverge.
    val creditEvents = Set("e3", "e1", "e6")

    // Sort key for an event map; a missing "ts" sorts first (-1).
    // Assumes "ts" values are numeric strings — TODO confirm against the CSV.
    val tsOf = (mp: Map[String, String]) => mp.getOrElse("ts", "-1").toInt

    // First-touch: eid of the earliest touch event, or null when none exists.
    // (Fixed: original called .equals on a possibly-null getOrElse result,
    // which NPEs when an event map lacks the "eid" key.)
    val findFirst = (arr: mutable.WrappedArray[Map[String, String]]) => {
      arr.sortBy(tsOf)
        .find(mp => mp.get("eid").exists(touchEvents.contains))
        .flatMap(_.get("eid"))
        .orNull
    }

    // Last-touch: eid of the latest touch event, or null when none exists.
    val findLast = (arr: mutable.WrappedArray[Map[String, String]]) => {
      arr.sortBy(mp => -tsOf(mp))
        .find(mp => mp.get("eid").exists(touchEvents.contains))
        .flatMap(_.get("eid"))
        .orNull
    }

    // Linear attribution: each distinct credited event present in the user's
    // history gets an equal integer share of 100, emitted as "eid\001share"
    // pairs joined by commas. (Fixed: .get("eid").get threw on missing keys.)
    val linear = (arr: mutable.WrappedArray[Map[String, String]]) => {
      val hit = arr.flatMap(_.get("eid")).toSet.intersect(creditEvents)
      // map over an empty set never evaluates the body: no division by zero.
      hit.map(e => e + "\001" + 100 / hit.size).mkString(",")
    }

    // Time-decay attribution: drop consecutive duplicate events, then weight
    // by recency — with n kept events and total = n(n+1)/2, the i-th most
    // recent (0-based) gets 100*(n-i)/total.
    // (Fixed: original crashed on arr2(0) for users with no credited events,
    // and its dedup loop silently dropped the very first event from scoring.)
    val timed = (arr: mutable.WrappedArray[Map[String, String]]) => {
      val hits = arr.toArray.filter(mp => mp.get("eid").exists(creditEvents.contains))
      if (hits.isEmpty) {
        ""
      } else {
        // Keep an event only when its eid differs from the previously kept one.
        val deduped = hits.foldLeft(List.empty[Map[String, String]]) { (acc, mp) =>
          if (acc.headOption.exists(_.get("eid") == mp.get("eid"))) acc else mp :: acc
        }.reverse

        val sorted = deduped.sortBy(mp => -tsOf(mp))
        val n = sorted.size
        val total = n * (n + 1) / 2 // closed form of 1 + 2 + ... + n
        sorted.zipWithIndex
          .map { case (mp, i) => mp.getOrElse("eid", "") + "\001" + 100 * (n - i) / total }
          .mkString(",")
      }
    }

    spark.udf.register("fir", findFirst)
    spark.udf.register("lst", findLast)
    spark.udf.register("linear", linear)
    spark.udf.register("timed", timed)

    // Raw per-user event lists, for eyeballing the input fed to the UDFs.
    spark.sql(
      """
        |
        |select
        |guid,
        |collect_list(map('eid',eventid,'ts',ts)) as mp
        |from df
        |group by guid
        |
        |""".stripMargin).show(100, false)

    // All four attribution strategies side by side, per user.
    spark.sql(
      """
        |
        |select
        |guid,
        |fir(collect_list(map('eid',eventid,'ts',ts))) as f1,
        |lst(collect_list(map('eid',eventid,'ts',ts))) as f2,
        |linear(collect_list(map('eid',eventid,'ts',ts))) as f3,
        |timed(collect_list(map('eid',eventid,'ts',ts))) as f4
        |from df
        |group by guid
        |
        |""".stripMargin).show(100, false)

    spark.close()
  }

}
