package com.lvmama.rhino.analyze.processing

import com.lvmama.rhino.common.entity._
import com.lvmama.rhino.common.utils.Utils
import com.lvmama.rhino.common.utils.logging.LazyLogging
import com.lvmama.rhino.common.utils.methodset.CalculateFunction._
import org.apache.spark.sql.Dataset
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types.LongType
import org.apache.spark.storage.StorageLevel
import com.lvmama.rhino.common.utils.JDBCUtil.Conversions._
import com.vdurmont.emoji.EmojiParser

import scala.util.parsing.json.JSONObject

/**
  * Created by yuanxiaofeng on 2017/5/19.
  *
  * Wireless v2 search-jump analysis job. From one day's client monitor logs it
  * derives per-session search context (previous/next page, previous search key,
  * previous key type) and writes four aggregate tables directly to MySQL:
  *   - flow_search_arrival_key : search keyword -> product-category arrivals
  *   - flow_search_jump_uv     : per-session (UV) default-tab jump outcomes
  *   - flow_search_jump_pv     : per-event (PV) default-tab jump outcomes
  *   - flow_search_result      : PV/UV of searches that returned zero results
  */
class WirelessV2SearchJump extends Serializable with LazyLogging{
  self =>

  // One log event with its `pageParam` map flattened into columns, plus
  // lag/lead-derived session context (preLogType, prePage, nextPage,
  // preKeyType, preSearchKey).
  // NOTE(review): field order matters — `process` builds its select list from
  // Utils.extractFieldNames[SearchType].dropRight(1), so reordering fields here
  // changes which columns are selected; confirm before touching this.
  case class SearchType(logType: String, preLogType: String, pageTypeCode: String, stationId: String, platformCode: String,
                        timestamp: Long, channelCode: String, deviceToken: String, sessionId: String,
                        buttonCode: String, pageCode: String, province: String, prePage: String, nextPage: String,
                        preKeyType: String, searchKey: String, preSearchKey: String,  lab: String, productSum: String, pagination: String,
                        productId: String, productName: String, categoryId: String)

  /**
    * Runs all four analyses over the given day's monitor-log dataset.
    * Side-effecting: each result DataFrame is inserted straight into MySQL via
    * the implicit `insertDF2MysqlDirect`; nothing meaningful is returned.
    */
  def process(ds: Dataset[WirelessClientMonitorLog]) = {

    // Column names to select, obtained from the case class by reflection.
    // NOTE(review): dropRight(1) trims the last extracted name — presumably an
    // artifact of how extractFieldNames works; verify the selected columns
    // still line up with the .as[SearchType] encoder below.
    val schemas = Utils.extractFieldNames[self.SearchType].dropRight(1)
    // Register the outer instance so Spark can construct the inner case class.
    org.apache.spark.sql.catalyst.encoders.OuterScopes.addOuterScope(self)

    import ds.sparkSession.implicits._
    // Base dataset shared by all three analyses below; persisted here and
    // unpersisted at the end of this method.
    val search2Detail = ds
      .filter($"buttonCode" =!= "filter")  // drop filter-button clicks
//      .filter($"pageTypeCode" === SEARCH_PRE.code ||$"pageTypeCode" === SEARCH_DETAIL.code || $"pageTypeCode" === DETAIL.code)
      // Flatten the pageParam map into top-level columns.
      .withColumn("keyType", col("pageParam").getItem("keyType"))
      .withColumn("searchKey", col("pageParam").getItem("searchKey"))
      .withColumn("lab", col("pageParam").getItem("lab"))
      // NOTE(review): key "produceSum" looks like a typo for "productSum" —
      // confirm against the client log schema before "fixing"; the raw logs
      // may genuinely use this spelling.
      .withColumn("productSum", col("pageParam").getItem("produceSum"))
      .withColumn("pagination", col("pageParam").getItem("pagination"))
      .withColumn("filterConditions", col("pageParam").getItem("filterConditions"))
      .withColumn("productId", col("pageParam").getItem("productId"))
      .withColumn("productName", col("pageParam").getItem("productName"))
      .withColumn("categoryId", col("pageParam").getItem("categoryId"))
      .withColumn("timestamp", col("timestamp").cast(LongType))
      // Pack "pageTypeCode:searchKey:keyType" into one string so a single lag
      // carries all three previous-row values at once.
      .withColumn("wordWithCode", concat_ws(":", col("pageTypeCode"), col("searchKey"), col("keyType")))
      // columnLag/columnLead come from CalculateFunction — presumably
      // session-windowed lag/lead; confirm their partitioning/ordering there.
      .withColumn("preWordWithCode", columnLag(col("wordWithCode")))
      .withColumn("preSearchKey", split(col("preWordWithCode"), ":")(1))
      .withColumn("prePage", split(col("preWordWithCode"), ":")(0))
      .withColumn("preKeyType", split(col("preWordWithCode"), ":")(2))
      .withColumn("nextPage", columnLead(col("pageTypeCode")))
      .withColumn("preLogType", columnLag(col("logType")))
      .select(schemas.map(col(_)): _*)
      .as[SearchType]
      .persist(StorageLevel.MEMORY_AND_DISK_SER)


    // Keyword -> category arrival analysis: for detail-page events reached
    // from the search page, count arrivals per (station, platform, category)
    // and per previous search keyword, with a per-product breakdown as JSON.
    val yesterday = Utils.getYesterday()
    val arrivalCategoryAnalysis = search2Detail
      .filter(s => s.pageTypeCode == DETAIL.code && s.prePage == SEARCH_DETAIL.code)
      .groupByKey(s => (s.stationId, s.platformCode, s.categoryId, s.preSearchKey))
      .mapGroups { (key, iter) =>
        val _iter = iter.toList
        // (key, arrivals for this keyword, {"productId,productName" -> count})
        (key, _iter.size, _iter.map(i => (i.productId, i.productName))
          .groupBy(p => p).map(l => (l._1.productIterator.mkString(","), l._2.size)))
      }
      .map(l => (l._1._1, l._1._2, l._1._3, l._1._4, l._2, JSONObject(l._3).toString()))
      // Re-aggregate to (station, platform, category): total arrivals plus the
      // per-keyword detail tuples.
      .groupByKey(s => (s._1, s._2, s._3))
      .mapGroups { (key, iter) =>
        val _iter = iter.toList
        (key, _iter.map(_._5).sum, _iter.map(i => (i._4, i._5, i._6)))
      }.flatMap { line =>
      val part = line._3
      // One output row per (category, keyword); emojis are stripped so the
      // strings survive the MySQL charset.
      part.map(p => (line._1._1, line._1._2, line._1._3, line._2,
//        EmojiParser.removeAllEmojis(p._1),
        Utils.emojiFilter(p._1),
        p._2, Utils.emojiFilter(p._3)))
    }.toDF("station_id", "platform_code", "category_id", "category_amount", "search_key", "search_key_amount", "search_map")
      .withColumn("oper_date", lit(yesterday))

    arrivalCategoryAnalysis.insertDF2MysqlDirect("flow_search_arrival_key")

    // Default-tab jump analysis.
    // Rules:
    // 1. Each sessionId yields exactly one jump record: direct jump success,
    //    switch jump success, or jump failure.
    // 2. If a session contains any successful jump, count the session as a
    //    success; take the first one when there are several.
    // 3. If there is no successful jump, take the first failure.
    // 4. When a session has both a direct success and a switch success, still
    //    just take the first record.
    val jump = search2Detail
      .filter(s => s.pageTypeCode == SEARCH_DETAIL.code && s.logType == LogType.click.toString && s.buttonCode == "switch"
      && s.searchKey != null)

    import spray.json._
    import DefaultJsonProtocol._
    // UV: reduce each session to one record per the rules above.
    val jump_uv = jump
      .groupByKey(s => s.sessionId)
      .mapGroups((key, iter) => {
        val _iter = iter.map(i => (i.stationId, i.platformCode, i.lab, i.preLogType, i.nextPage, i.searchKey)).toList
        // The session has at least one event that landed on the detail page.
        if (_iter.exists(_._5 == DETAIL.code)) {
          //            var record = null.asInstanceOf[(String, String, String, String, String, String)]
          //            if (_iter.exists(_._4 == "forward")) {
          //              record = _iter.toStream.filter(f => f._4 == "forward" && f._5 == DETAIL.code).head
          //            } else if (_iter.exists(_._4 == "click")) {
          //              record = _iter.toStream.filter(f => f._4 == "click" && f._5 == DETAIL.code).head
          //            }
          //            record
          val _i = _iter.filter(f => (f._4 == LogType.forward.toString || f._4 == LogType.click.toString)
            && f._5 == DETAIL.code)
          if (_i.nonEmpty) {
            _i.head
          } else {
            // Landed on detail but with an unrecognised preLogType: emit an
            // empty placeholder, filtered out below.
            ("", "", "", "", "", "")
          }
        } else {
          // No success in this session — take the first (failure) record.
          if (_iter.nonEmpty) {
            _iter.take(1).head
          } else {
            ("", "", "", "", "", "")
          }
        }
      })
      .filter(f => f._2 != "" && f._4 != null)  // drop empty placeholders
      .coalesce(10)
      // key = (station, platform, lab); value = ("preLogType_nextPage", searchKey).
      // NOTE: a null preLogType concatenates to the string "null_<page>", which
      // the getOrElse("null" + "_" + ...) bucket below deliberately picks up.
      .map(line => ((line._1, line._2, line._3), (line._4 + "_" + line._5, line._6)))
      .groupByKey(g => g._1)
      .mapGroups((key, iter) => {
        val _iter = iter.map(m => m._2).toList
        (key,
          // counts per outcome bucket, e.g. "forward_<detailCode>" -> n
          _iter.groupBy(p => p._1).map(p => (p._1, p._2.size)),
          // per-bucket keyword histograms: bucket -> (searchKey -> n)
          _iter.groupBy(p => p._1).map(p => (p._1, p._2.groupBy(pp => pp._2).map(pp => (pp._1, pp._2.size)))))
      })
      .map(line => (line._1._1, line._1._2, line._1._3,
        // jump_success: forward (or null preLogType) onto the detail page
        line._2.getOrElse(LogType.forward.toString + "_" + DETAIL.code, 0) + line._2.getOrElse("null" + "_" + DETAIL.code, 0),
        // switch_jump_success: explicit click onto the detail page
        line._2.getOrElse(LogType.click.toString + "_" + DETAIL.code, 0),
        // jump_failure: stayed on the search page
        line._2.getOrElse(LogType.click.toString + "_" + SEARCH_DETAIL.code, 0) + line._2.getOrElse(LogType.forward.toString + "_" + SEARCH_DETAIL.code, 0),
        Utils.emojiFilter(line._3.getOrElse(LogType.forward.toString + "_" + DETAIL.code, Map.empty[String, Int]).toJson.toString),
        Utils.emojiFilter(line._3.getOrElse(LogType.click.toString + "_" + DETAIL.code, Map.empty[String, Int]).toJson.toString)
        ))
      .toDF("station_id", "platform_code", "category_code", "jump_success", "switch_jump_success", "jump_failure",
        "jump_json", "switch_jump_json")
      .withColumn("oper_date", lit(yesterday))
    jump_uv.insertDF2MysqlDirect("flow_search_jump_uv")

    // PV variant: same bucketing as jump_uv but over raw events grouped
    // directly by (station, platform, lab) — no per-session reduction.
    val flow_search_jump_pv = jump
      .groupByKey(s => (s.stationId, s.platformCode, s.lab))
      .mapGroups((key, iter) => {
        val _iter = iter.map(i => (i.preLogType + "_" + i.nextPage, i.searchKey)).toList
        (key, _iter.groupBy(p => p._1).map(p => (p._1, p._2.size)),
          _iter.groupBy(p => p._1).map(p => (p._1, p._2.groupBy(pp => pp._2).map(pp => (pp._1, pp._2.size)))))
      })
      .map(line => (line._1._1, line._1._2, line._1._3,
        line._2.getOrElse(LogType.forward.toString + "_" + DETAIL.code, 0) + line._2.getOrElse("null" + "_" + DETAIL.code, 0),
        line._2.getOrElse(LogType.click.toString + "_" + DETAIL.code, 0),
        line._2.getOrElse(LogType.click.toString + "_" + SEARCH_DETAIL.code, 0) + line._2.getOrElse(LogType.forward.toString + "_" + SEARCH_DETAIL.code, 0),
        Utils.emojiFilter(line._3.getOrElse(LogType.forward.toString + "_" + DETAIL.code, Map.empty[String, Int]).toJson.toString),
        Utils.emojiFilter(line._3.getOrElse(LogType.click.toString + "_" + DETAIL.code, Map.empty[String, Int]).toJson.toString)
        ))
      .toDF("station_id", "platform_code", "category_code", "jump_success", "switch_jump_success", "jump_failure",
        "jump_json", "switch_jump_json")
      .withColumn("oper_date", lit(yesterday))

    flow_search_jump_pv.insertDF2MysqlDirect("flow_search_jump_pv")


    // Search-result analysis: searches that returned zero results
    // (productSum == "0"), taking only events whose previous key type was
    // direct input ("di") or autocomplete click ("cc").
    val searchResultAnalyze = search2Detail
      .filter(f => f.pageTypeCode == SEARCH_DETAIL.code && f.buttonCode == "switch" && f.productSum == "0"
      && (f.preKeyType == "di" || f.preKeyType == "cc"))

    // PV: count every event per (station, platform, channel, lab, keyword).
    val searchResultPv = searchResultAnalyze
      .map(s => (s.stationId, s.platformCode, s.channelCode, s.lab, s.searchKey))
      .groupByKey(g => (g._1, g._2, g._3, g._4, g._5))
      .mapGroups((group, iter) => (group, iter.size))
      .map(m => (m._1._1, m._1._2, m._1._3, m._1._4, Utils.emojiFilter(m._1._5), m._2))

    // UV: dedupe on sessionId first, then count per the same key.
    val searchResultUv = searchResultAnalyze
      .map(s => (s.stationId, s.platformCode, s.channelCode, s.lab, s.searchKey, s.sessionId))
      .distinct()
      .groupByKey(g => (g._1, g._2, g._3, g._4, g._5))
      .mapGroups((group, iter) => (group, iter.size))
      .map(m => (m._1._1, m._1._2, m._1._3, m._1._4, Utils.emojiFilter(m._1._5), m._2))

    // Left-join PV with UV on the five grouping columns. NOTE(review): both
    // sides carry a `_6` count column after the join; toDF resolves them only
    // positionally (amount_PV from the left, amount_UV from the right) — keep
    // the column order in sync if either side changes.
    searchResultPv.join(searchResultUv, Seq("_1", "_2", "_3", "_4", "_5"), "left")
      .filter(f => f.getAs[String]("_5") != null && f.getAs[String]("_5").length <= 50 && f.getAs[String]("_1") != "")
      .withColumn("oper_date", lit(yesterday))
      .toDF("station_id", "platform_code", "channel_code", "category_code", "key_word","amount_PV","amount_UV", "oper_date")
      .insertDF2MysqlDirect("flow_search_result")

    search2Detail.unpersist()
  }

}
