package cn.doitedu.dwetl

import java.text.DecimalFormat

import org.apache.spark.sql.SparkSession

import scala.collection.mutable
import scala.collection.mutable.ListBuffer

/**
 * @author 涛哥
 * @nick_name "deep as the sea"
 * @contact qq:657270652 wx:doit_edu
 * @site www.doitedu.cn
 * @date 2021-01-21
 * @desc 归因分析
 */
object AttributeAnalysis {

  def main(args: Array[String]): Unit = {

    val spark = SparkSession
      .builder()
      .appName("AttributeAnalysis")
      .master("local[*]")
      .enableHiveSupport()
      .getOrCreate()
    import org.apache.spark.sql.functions._
    import spark.implicits._


    val demo = spark.read.table("test.attribute_demo")

    // Attribution analysis.
    // Target (conversion) event: e6; candidate attribution events: e1, e3, e5
    // (an earlier comment said e2, but the code and the sample output below use e1).
    //
    // Collect each user's events as "ts_eventid" strings. The timestamp is left-padded
    // with '0' so that the lexicographic array_sort is also a chronological sort.
    // Keep only users whose concatenated event string contains the target event
    // (NOTE(review): the rlike on the concatenated string assumes no other event id
    // contains "e6" as a substring — true for this demo data set).
    val filtered = demo.where('eventid.isin("e1", "e3", "e5", "e6"))
      .groupBy("guid")
      .agg(array_sort(collect_list(concat_ws("_", lpad('ts, 2, "0"), 'eventid))) as "evs")
      .where(concat_ws("", 'evs).rlike(".*?e6.*?"))

    // Utility: split a chronological event-id list into one segment per occurrence of
    // the target event. Events AFTER the last occurrence of the target are dropped on
    // purpose — they did not lead to a conversion. A segment may be empty (the target
    // fired as the very first event, or twice in a row); callers must handle that.
    val splitEvList = (evs: Array[String], targetEv: String) => {
      val segments = new ListBuffer[ListBuffer[String]]
      var current = new ListBuffer[String]
      for (ev <- evs) {
        if (ev.equals(targetEv)) {
          segments += current
          current = new ListBuffer[String]
        } else {
          current += ev
        }
      }
      segments.toList
    }

    // Utility: strip the "ts_" prefix from each element, keeping only the event id.
    val toEventIds = (evs: mutable.WrappedArray[String]) => evs.toArray.map(_.split("_")(1))

    // First-touch attribution UDF: for each conversion segment, credit the first event.
    // BUGFIX: empty segments are skipped; the original code called evs(0) on them and
    // threw IndexOutOfBoundsException whenever the target event had no preceding events.
    val find_firstAttr = (evs: mutable.WrappedArray[String], targetEv: String) => {
      splitEvList(toEventIds(evs), targetEv).filter(_.nonEmpty).map(_.head).toArray
    }

    filtered.createTempView("demo")
    spark.udf.register("find_firstAttr", find_firstAttr)


    // Last-touch attribution UDF: for each conversion segment, credit the last event.
    // Same empty-segment guard as find_firstAttr.
    val find_lastAttr = (evs: mutable.WrappedArray[String], targetEv: String) => {
      splitEvList(toEventIds(evs), targetEv).filter(_.nonEmpty).map(_.last).toArray
    }
    spark.udf.register("find_lastAttr", find_lastAttr)


    // Linear attribution UDF: within each conversion segment, spread 100% evenly over
    // the DISTINCT events of the segment. Each result element is an "eventId,weight"
    // string; the SQL below splits it on the comma. Empty segments contribute nothing
    // (their empty set never evaluates the division, so no divide-by-zero can occur).
    val linearAttr = (evs: mutable.WrappedArray[String], targetEv: String) => {
      val fmt = new DecimalFormat("0.00") // hoisted: one formatter per call, not per segment
      splitEvList(toEventIds(evs), targetEv).flatMap { segment =>
        val attrEvs: Set[String] = segment.toSet
        attrEvs.map(e => e + "," + fmt.format(100.0 / attrEvs.size))
      }.toArray
    }
    spark.udf.register("find_linearAttr", linearAttr)

    // Run all three attribution models and union the results into one report.
    spark.sql(
      """
        |
        |select
        |'首次触点归因' as attr_model_name,
        |'e6' as attr_target,
        |guid,
        |first_attr,
        |100 as attr_weight
        |from demo lateral view explode(find_firstAttr(evs,'e6')) o as first_attr
        |
        |union all
        |
        |select
        |'末次触点归因' as attr_model_name,
        |'e6' as attr_target,
        |guid,
        |last_attr,
        |100 as attr_weight
        |from demo lateral view explode(find_lastAttr(evs,'e6')) o as last_attr
        |
        |union all
        |
        |select
        |'线性归因' as attr_model_name,
        |'e6' as attr_target,
        |guid,
        |split(attr_weight,',')[0],
        |split(attr_weight,',')[1]
        |from demo lateral view explode(find_linearAttr(evs,'e6')) o as attr_weight
        |
        |""".stripMargin).show(100, false)

    /**
     * Expected output on the demo data set:
     *
     * +---------------+-----------+----+----------+-----------+
     * |attr_model_name|attr_target|guid|first_attr|attr_weight|
     * +---------------+-----------+----+----------+-----------+
     * |首次触点归因   |e6         |g001|e1        |100        |
     * |首次触点归因   |e6         |g001|e5        |100        |
     * |首次触点归因   |e6         |g002|e3        |100        |
     * |末次触点归因   |e6         |g001|e5        |100        |
     * |末次触点归因   |e6         |g001|e5        |100        |
     * |末次触点归因   |e6         |g002|e5        |100        |
     * |线性归因       |e6         |g001|e1        |33.33      |
     * |线性归因       |e6         |g001|e3        |33.33      |
     * |线性归因       |e6         |g001|e5        |33.33      |
     * |线性归因       |e6         |g001|e5        |100.00     |
     * |线性归因       |e6         |g002|e3        |33.33      |
     * |线性归因       |e6         |g002|e1        |33.33      |
     * |线性归因       |e6         |g002|e5        |33.33      |
     * +---------------+-----------+----+----------+-----------+
     */


    // TODO: positional attribution model


    // TODO: time-decay attribution model


    spark.close()

  }

}
