package com.lvmama.rhino.analyze.processing

import com.lvmama.rhino.common.entity._
import com.lvmama.rhino.common.utils.JDBCUtil.Conversions.JDBCSink
import com.lvmama.rhino.common.utils.Utils
import com.lvmama.rhino.common.utils.methodset.CalculateFunction
import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
import org.apache.spark.sql.catalyst.plans.JoinType
import org.apache.spark.sql.{Dataset, Encoder, Encoders}
import org.apache.spark.sql.expressions.Aggregator
import org.apache.spark.sql.expressions.scalalang.typed
import org.apache.spark.sql.functions._
import org.apache.spark.sql.streaming.{OutputMode, ProcessingTime}
import org.apache.spark.sql.types.LongType

import scala.collection.mutable
import scala.util.parsing.json.JSONObject


/**
  * Created by yuanxiaofeng on 2017/3/20.
  */
/**
  * Streaming analysis of wireless-client "search" behaviour (V2).
  *
  * Builds a typed [[SearchType]] view of the raw monitor log, then derives
  * keyword / jump / pagination aggregations over it. Only the `searchFlow`
  * aggregation is wired to a sink; the remaining datasets are lazy Spark
  * transformations with no action attached, so as written they are never
  * executed (see NOTE(review) comments inside `process`).
  */
class WirelessV2Search extends Serializable{
  self =>
  // Search record: one row per client log event, enriched with fields pulled
  // out of the pageParam map plus the previous/next page-type codes observed
  // for the same device (window functions over deviceToken/timestamp).
  case class SearchType(logType: String, pageTypeCode: String, stationId: String, platformCode: String,
                        timestamp: Long, channelCode: String, deviceToken: String,
                        buttonCode: String, pageCode: String, province: String, prePage: String, nextPage: String,
                        keyType: String, searchKey: String, lab: String, productSum: String, pagination: String,
                        filterConditions: String, productId: String)

  /**
    * Runs the search-behaviour pipeline over the incoming monitor-log stream.
    *
    * Steps: (1) flatten selected `pageParam` entries into top-level columns,
    * (2) attach prev/next page-type codes per device via window functions,
    * (3) aggregate search-flow counts and write them through a JDBC sink.
    *
    * NOTE(review): `ss.awaitTermination()` below blocks until the streaming
    * query terminates, so every statement after it is effectively dead code
    * in normal operation — and none of those later aggregations has an
    * action/sink attached. Confirm whether they were meant to be started as
    * their own streaming queries.
    *
    * @param ds streaming dataset of raw wireless-client monitor-log events
    */
  def process(ds: Dataset[WirelessClientMonitorLog]): Unit = {
    // Column names of SearchType minus the trailing field (productId is
    // dropped by dropRight(1)); used to project before the typed conversion.
    val schemas = Utils.extractFieldNames[self.SearchType].dropRight(1)
    // Register the outer instance so the inner case class can be encoded.
    org.apache.spark.sql.catalyst.encoders.OuterScopes.addOuterScope(self)
    import ds.sparkSession.implicits._
    val detail = ds
//      .filter(_.logType == "forward")
      .withColumn("keyType", col("pageParam").getItem("keyType"))
      .withColumn("searchKey", col("pageParam").getItem("searchKey"))
      .withColumn("lab", col("pageParam").getItem("lab"))
      // NOTE(review): reads pageParam key "produceSum" into column
      // "productSum" — looks like a typo for "productSum"; confirm the
      // actual key name emitted by the client before changing it.
      .withColumn("productSum", col("pageParam").getItem("produceSum"))
      .withColumn("pagination", col("pageParam").getItem("pagination"))
      .withColumn("filterConditions", col("pageParam").getItem("filterConditions"))
      .withColumn("productId", col("pageParam").getItem("pi"))
      .withColumn("timestamp", col("timestamp").cast(LongType))
      // Previous / next page-type code within the same device's event
      // sequence, using the window defined in CalculateFunction.
      .withColumn("prePage",
        lag(col("pageTypeCode"), 1).over(CalculateFunction.windowSpec(col("deviceToken"), col("timestamp"))))
      .withColumn("nextPage",
        lead(col("pageTypeCode"), 1).over(CalculateFunction.windowSpec(col("deviceToken"), col("timestamp"))))
      .select(schemas.map(col(_)): _*)
      .as[SearchType]

//    detail.printSchema()
//    detail.show(100)

    // Search traffic analysis:
    // each "forward" entry into a search pre-page counts as one visit.
    val search = detail.filter(_.pageTypeCode == SEARCH_PRE.code)
      .filter(_.logType == "forward")
    val searchFlow = search
      .groupByKey(w => (w.platformCode, w.channelCode)).count()
//        .groupBy(window($"timestamp", "10 minutes"), $"platformCode", $"channelCode")

    import scala.concurrent.duration._
    // Stream (platform, channel, count) rows to JDBC, triggered every minute.
    val write = new JDBCSink()
    val ss = searchFlow
      .map(w => (w._1._1, w._1._2, w._2))
      .withColumn("oper_date", lit(Utils.getTime()))
      .writeStream
      .foreach(write)
      .outputMode("append")
//      .option("checkpointLocation", "hdfs://10.200.2.104:8020/checkpoint/wireless_searchFlow")
      .trigger(ProcessingTime(1.minutes))
      .start()

    // NOTE(review): blocks here indefinitely while the query runs — the
    // remainder of this method is unreachable until the stream stops.
    ss.awaitTermination()

    // Keyword analysis
    search.groupByKey(w => (w.stationId, w.platformCode, w.keyType))
      .count

    search.groupByKey(w => (w.stationId, w.platformCode, w.searchKey))
      .agg(new ComplexResultAgg[SearchType](w => w.keyType).toColumn)

    search.groupByKey(w => (w.stationId, w.platformCode, w.searchKey))
      .agg(new ComplexResultAgg[SearchType](w => w.province).toColumn)

    // Keyword-to-category arrival analysis: events whose previous page was
    // the search-detail page and whose current page is a product detail page.
    detail.filter(_.prePage == SEARCH_DETAIL.code)
      .filter(_.pageCode == DETAIL.code)
      .groupByKey(w => (w.stationId, w.platformCode, w.searchKey, w.lab))
          .agg(new ComplexResultAgg[SearchType](w => w.productId).toColumn)

    // Default search-result jump: search pre-page -> search-detail page
    val default_jump = detail.filter(_.pageCode == SEARCH_DETAIL.code)
      .filter(_.prePage == SEARCH_PRE.code)

    val jump_pv = default_jump.groupByKey(w => (w.platformCode, w.stationId, w.lab))
      .agg(typed.count[SearchType](_.lab).name("sum"))

    /*
      mapWithState  checkPoint + WAL
     */
//    val jump_uv = _

    val jump_success = detail.filter(_.pageCode == SEARCH_DETAIL.code)
      .filter(_.prePage == SEARCH_PRE.code)
      .filter(_.nextPage == DETAIL.code)
      .groupByKey(w => (w.platformCode, w.stationId, w.lab))
      .agg(typed.count[SearchType](_.lab).name("success_sum"))

    // Default successful-jump rate: total pv joined with successful pv
    jump_pv.join(jump_success, Seq("key"), "left")

    // Switch jump
    // NOTE(review): the two filter predicates below are identical, so the
    // second filter is a no-op — one of them was probably meant to test
    // pageCode (mirroring default_jump above). Confirm intent before fixing.
    val switch_jump = detail.filter(_.prePage == SEARCH_DETAIL.code)
      .filter(_.prePage == SEARCH_DETAIL.code)

    // pv
    val switch_pv_lab = switch_jump.groupByKey(w => (w.stationId, w.platformCode, w.lab))
      .agg(typed.count[SearchType](_.lab).name("sum"))

    val switch_pv_lab_success = switch_jump.filter(_.nextPage == DETAIL.code)
      .groupByKey(w => (w.stationId, w.platformCode, w.lab))
      .agg(typed.count[SearchType](_.lab).name("sum"))

    switch_pv_lab.join(switch_pv_lab_success, Seq("key"), "left")

    // Scenic-spot ticket successful jump
    switch_jump.groupByKey(w => (w.stationId, w.platformCode, w.lab))
      .agg(new ComplexResultAgg[SearchType](w => w.searchKey).toColumn)

    /*
      mapWithState  checkPoint + WAL
     */
//    val switch_uv = _

    // Search-result analysis: pv
    search.groupByKey(w => (w.stationId, w.platformCode, w.channelCode))
      .agg(new ComplexResultAgg[SearchType](w => w.searchKey).toColumn)

    // Search-result analysis: uv (not implemented)

    // Filter-condition analysis (not implemented)

    // Pagination usage detail
    search.groupByKey(w => w.platformCode)
      .agg(new ComplexResultAgg[SearchType](w => w.pagination).toColumn)

  }

}

/**
  * Typed Spark aggregator that builds a frequency table over the string key
  * extracted from each input row by `f`, and renders the final counts as a
  * JSON object string, e.g. {"a" : 2, "b" : 1}.
  *
  * @param f extracts the grouping key from an input row
  * @tparam T input row type
  */
class ComplexResultAgg[T](f: T => String)
  extends Aggregator[T, mutable.Map[String, Int], String]{

  /** Start from an empty frequency table. */
  override def zero: mutable.Map[String, Int] = mutable.Map.empty[String, Int]

  /** Increment the count for the key extracted from `a`, in place. */
  override def reduce(b: mutable.Map[String, Int], a: T): mutable.Map[String, Int] = {
    val key = f(a)
    val updated = b.getOrElse(key, 0) + 1
    b += (key -> updated)
  }

  /** Render the accumulated counts as a JSON object string. */
  override def finish(reduction: mutable.Map[String, Int]): String =
    JSONObject.apply(reduction.toMap).toString()

  /** Combine two partial tables by summing counts key-wise. */
  override def merge(b1: mutable.Map[String, Int], b2: mutable.Map[String, Int]): mutable.Map[String, Int] = {
    val summed = b2.map { case (key, count) => key -> (count + b1.getOrElse(key, 0)) }
    b1 ++ summed
  }

  /** Encoder for the intermediate mutable map buffer. */
  override def bufferEncoder: Encoder[mutable.Map[String, Int]] = ExpressionEncoder()

  /** Encoder for the JSON string result. */
  override def outputEncoder: Encoder[String] = Encoders.STRING
}