package doit20.datayi.etl

import doit20.datayi.pojo.EventBean
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.expressions.GenericRow
import org.apache.spark.sql.expressions.MutableAggregationBuffer
import org.apache.spark.sql.{DataFrame, Row, SparkSession}

import scala.collection.{immutable, mutable}
import scala.collection.mutable.ListBuffer

/**
 * @author 涛哥
 * @nick_name "deep as the sea"
 * @contact qq:657270652 wx:doit_edu
 * @site www.doitedu.cn
 * @date 2021-04-21
 * @desc 归因分析
 */
object AttributionAnalysis {

  def main(args: Array[String]): Unit = {

    // Local Spark session with Hive support; a single shuffle partition keeps
    // the demo output deterministic and tidy.
    val spark = SparkSession.builder()
      .appName("归因分析")
      .master("local")
      .config("spark.sql.shuffle.partitions", "1")
      .enableHiveSupport()
      .getOrCreate()

    // Event detail records restricted to the analysis window.
    val df = spark.read
      .table("doit20dwd.app_event_detail")
      .where("dt between '2021-04-01' and '2021-04-20' ")

    // Target (conversion) event: e1 carrying property p1 = v1.
    val targetEvent = EventBean(-1, "e1", Map("p1" -> "v1"), 0L)

    // Candidate events eligible to receive attribution credit.
    val candidates = List(
      EventBean(-1, "e5", Map.empty[String, String], 0L),
      EventBean(-1, "e7", Map.empty[String, String], 0L),
      EventBean(-1, "e8", Map.empty[String, String], 0L)
    )

    firstAttribute("跳楼购归因", spark, df, targetEvent, candidates)

  }

  /**
   * First-touch attribution (首次触点归因).
   *
   * Keeps only rows matching the target event or one of the attribution
   * candidates, then per `guid` credits the earliest candidate event with
   * the full weight (1).
   *
   * @param modelName         label written into the `modelname` output column
   * @param spark             active session (used for SQL and implicits)
   * @param df                raw event-detail DataFrame
   * @param targetEvent       the conversion event being attributed
   * @param toAttributeEvents candidate events eligible for credit
   */
  def firstAttribute(modelName: String, spark: SparkSession, df: DataFrame, targetEvent: EventBean, toAttributeEvents: List[EventBean]) = {

    // 加入目标事件是  e04
    // g01,e01()
    // g01,e03()
    // g01,e01()
    // g01,e04()

    // g02,e02()
    // g02,e03()
    // g02,e05()
    // g02,e06()

    import spark.implicits._
    val filtered: DataFrame = filterConditionBeans(df, targetEvent, toAttributeEvents).toDF()

    // BUGFIX: createTempView throws if the view already exists, so a second
    // call in the same session failed; createOrReplaceTempView is idempotent.
    filtered.createOrReplaceTempView("f")

    // The earliest event is picked by string-sorting "timestamp-eventid".
    // NOTE(review): this is only correct while all timestamps have the same
    // digit count (e.g. epoch millis) — confirm against the table's data.
    val res = spark.sql(
      s"""
         | -- insert into table doit20dws.app_event_attribution partition(dt='2021-04-14')
         |select
         |'${modelName}' as modelname,
         |'首次触点归因策略' as strategyName,
         |guid,
         |'${targetEvent.eventId}' as targetEvent,
         |sort_array(collect_list(concat_ws('-',timestamp,eventid)))[0] as attributeEvent,
         |1 as weight
         |
         |from f
         |group by guid
         |
         |""".stripMargin)

    res.show(100, false)
  }


  // Last-touch attribution (末次触点归因) — TODO: not implemented yet.
  def lastAttribute(df: DataFrame, targetEvent: EventBean, toAttributeEvents: List[EventBean]) = {
    // TODO: credit the latest candidate event before the conversion with the full weight.
  }


  // Linear attribution (线性归因) — TODO: not implemented yet.
  def linearAttribute(df: DataFrame, targetEvent: EventBean, toAttributeEvents: List[EventBean]) = {
    // TODO: split the weight evenly across all candidate events before the conversion.
  }


  // Rule/position-based attribution (规则归因) — TODO: not implemented yet.
  def ruleAttribute(modelName: String, spark: SparkSession, df: DataFrame, targetEvent: EventBean, toAttributeEvents: List[EventBean]) = {
    // TODO: assign weights according to a configurable position rule (e.g. 40/20/40).
  }


  /**
   * Time-decay attribution (时间衰减归因).
   *
   * Per `guid`, the event stream is sorted newest-first and split into
   * segments, one segment per conversion (target event) holding the distinct
   * candidate events that preceded it. Within a segment each event gets a
   * geometric decay coefficient 0.9^i (i = distance from the conversion),
   * normalized so the segment's weights sum to 1.
   *
   * @param modelName         label written into the `modelname` output column
   * @param spark             active session (used for implicits)
   * @param df                raw event-detail DataFrame
   * @param targetEvent       the conversion event being attributed
   * @param toAttributeEvents candidate events eligible for credit
   */
  def timeDecayAttribute(modelName: String, spark: SparkSession, df: DataFrame, targetEvent: EventBean, toAttributeEvents: List[EventBean]) = {

    val filtered: RDD[EventBean] = filterConditionBeans(df, targetEvent, toAttributeEvents)

    val weights: RDD[(Long, mutable.Seq[(String, Double)])] = filtered.groupBy(bean => bean.guid).mapValues(iter => {

      // Newest first: the event closest to its conversion gets index 0 and
      // therefore the largest decay coefficient.
      val eventList = iter.toList.sortBy(bean => -bean.timeStamp)

      // Split the descending stream into per-conversion segments. A segment is
      // closed when the next (older) conversion is met, or at end of stream.
      var seenTarget = false
      var currentSegment = new ListBuffer[String]
      val segmentList = new ListBuffer[ListBuffer[String]]
      for (elem <- eventList) {
        // Accumulate candidate events only after a conversion has been seen,
        // deduplicating event ids within the segment.
        if (seenTarget && !eventIsMatch(elem, targetEvent) && !currentSegment.contains(elem.eventId)) {
          currentSegment += elem.eventId
        }
        if (eventIsMatch(elem, targetEvent)) {
          seenTarget = true
          // Empty segments carry no weight; skipping them is output-identical.
          if (currentSegment.nonEmpty) segmentList += currentSegment
          currentSegment = new ListBuffer[String]
        }
      }
      // BUGFIX: the trailing segment (events preceding the oldest conversion)
      // used to be dropped, so those events never received any weight.
      if (seenTarget && currentSegment.nonEmpty) segmentList += currentSegment

      // segment: List[a,b,c]
      val res: mutable.Seq[(String, Double)] = segmentList.flatMap(segment => {

        // Geometric decay coefficients, newest event first.
        val decayed: immutable.Seq[(String, Double)] = for (i <- 0 until segment.size) yield {
          (segment(i), Math.pow(0.9, i))
        }

        // Normalize by the coefficient sum so each segment's weights add to 1.
        val denominator = decayed.map(tp => tp._2).sum
        val normalized: immutable.Seq[(String, Double)] = decayed.map(tp => (tp._1, tp._2 / denominator))

        normalized.toList

      })

      res
    })

    // Flatten to one output row per (guid, attributed event).
    val res: RDD[(Long, String, Double)] = weights.flatMap(tp => {
      tp._2.map(t => (tp._1, t._1, t._2))
    })

    import spark.implicits._
    res.toDF("guid", "attributeEvent", "weight").selectExpr(s"'${modelName}' as modelname",
      "'时间衰减策略' as strategyName",
      "guid",
      // BUGFIX: was s"'$targetEvent'", which embedded the whole bean's
      // toString; use the event id, consistent with firstAttribute.
      s"'${targetEvent.eventId}' as targetEvent",
      "attributeEvent",
      "weight"
    ).show(100, false)

  }


  /**
   * Keep only the events that match either the target event or one of the
   * attribution candidates; rows are first converted to [[EventBean]]s.
   */
  private def filterConditionBeans(df: DataFrame, targetEvent: EventBean, toAttributeEvents: List[EventBean]) = {
    val conditions = targetEvent :: toAttributeEvents
    df.rdd
      .map(row2Bean)
      .filter(bean => conditions.exists(cond => eventIsMatch(bean, cond)))
  }


  /**
   * Convert one Spark SQL [[Row]] of the event-detail table into an [[EventBean]].
   * NOTE(review): Row.getAs(fieldName) is case-sensitive; confirm the table's
   * schema really exposes camelCase columns ("timeStamp", "eventId") — Hive
   * normally lowercases column names.
   */
  def row2Bean(row: Row): EventBean = {
    EventBean(
      row.getAs[Long]("guid"),
      row.getAs[String]("eventId"),
      row.getAs[Map[String, String]]("properties"),
      row.getAs[Long]("timeStamp")
    )
  }

  /**
   * Test whether a concrete event matches an event condition.
   *
   * A match requires the same eventId, and every property required by the
   * condition to be present in the source event with an equal value. A
   * condition with no properties matches on eventId alone (`forall` on an
   * empty map is vacuously true).
   *
   * Rewritten without `return`/`Option.get` (idiom fix); behavior unchanged.
   *
   * @param eventBeanSrc    the concrete event being tested
   * @param eventBeanTarget the condition (event id + required properties)
   */
  def eventIsMatch(eventBeanSrc: EventBean, eventBeanTarget: EventBean): Boolean = {
    eventBeanSrc.eventId == eventBeanTarget.eventId &&
      eventBeanTarget.properties.forall { case (k, v) =>
        eventBeanSrc.properties.get(k).contains(v)
      }
  }


}
