package com.lvmama.rhino.analyze.processing

import com.lvmama.rhino.common.entity.{SEARCH_DETAIL, SEARCH_PRE, WirelessClientMonitorLog}
import com.lvmama.rhino.common.utils.JDBCUtil.ConnectionPool
import com.lvmama.rhino.common.utils.Utils
import com.lvmama.rhino.common.utils.logging.LazyLogging
import org.apache.spark.HashPartitioner
import org.apache.spark.streaming.{State, StateSpec, Time}
import org.apache.spark.streaming.dstream.DStream

import scala.util.parsing.json.{JSON, JSONObject}

/**
  * Created by yuanxiaofeng on 2017/4/20.
  */
class WirelessVer2Search extends Serializable with LazyLogging {
  self =>

  /**
    * Flattened view of one wireless monitor log entry: the common log fields
    * plus the search-related values extracted from `pageParam`.
    */
  case class SearchType(logType: String, pageTypeCode: String, stationId: String, platformCode: String,
                        timestamp: Long, channelCode: String, deviceToken: String,
                        buttonCode: String, pageCode: String, province: String,
                        keyType: String, searchKey: String, lab: String, productSum: String, pagination: String,
                        filterConditions: String, productId: String, sessionId: String)

  /**
    * Escapes a value interpolated into a string-built SQL literal: doubles
    * single quotes and escapes backslashes so log-derived text (for example a
    * search keyword containing `'`) can no longer break the INSERT.
    *
    * NOTE(review): the statements below are still string-built SQL fed from
    * untrusted client logs; migrating to PreparedStatement is the proper fix.
    */
  private def escapeSql(value: String): String =
    if (value == null) "" else value.replace("\\", "\\\\").replace("'", "''")

  /**
    * Persists a DStream to MySQL with one batched INSERT per RDD partition.
    * Extracts the JDBC boilerplate that was previously copy-pasted for every
    * statistic in [[process]].
    *
    * @param stream stream whose records are written
    * @param toSql  builds one INSERT from a record and the formatted batch time
    */
  private def saveBatch[T](stream: DStream[T])(toSql: (T, String) => String): Unit =
    stream.foreachRDD((rdd, time: Time) => {
      val timeNow = Utils.timeFormat(time.milliseconds)
      rdd.foreachPartition(records => {
        val conn = ConnectionPool.getMysqlManager.getConnection
        try {
          val statement = conn.createStatement()
          try {
            conn.setAutoCommit(false)
            records.foreach { record =>
              val sql = toSql(record, timeNow)
              logger.info(sql)
              statement.addBatch(sql)
            }
            statement.executeBatch()
            conn.commit()
          } catch {
            case e: Exception =>
              // Roll back the partial batch instead of returning the pooled
              // connection with uncommitted work (the original only logged).
              try conn.rollback() catch { case _: Exception => () }
              logger.error("failed to persist batch", e)
          } finally {
            statement.close()
          }
        } finally {
          // Restore autocommit before the connection goes back to the pool.
          try conn.setAutoCommit(true) catch { case _: Exception => () }
          conn.close()
        }
      })
    })

  /**
    * Wires up the search analytics pipeline on a stream of client monitor
    * logs: search traffic, keyword type, keyword detail, search-result state
    * tracking, filter-condition usage and paging usage, each persisted to
    * MySQL per micro-batch.
    */
  def process(ds: DStream[WirelessClientMonitorLog]) = {
    val detail = ds
      .map(w =>
        SearchType(w.logType, w.pageTypeCode, w.stationId, w.platformCode, w.timestamp,
          w.channelCode, w.deviceToken, w.buttonCode, w.pageCode, w.province,
          w.pageParam.getOrElse("keyType", ""), w.pageParam.getOrElse("searchKey", ""), w.pageParam.getOrElse("lab", ""),
          // NOTE(review): "produceSum" looks like a typo for "productSum" —
          // confirm the key against the log producer before changing it.
          w.pageParam.getOrElse("produceSum", "0"), w.pageParam.getOrElse("pagination", ""),
          w.pageParam.getOrElse("filterConditions", ""), w.pageParam.getOrElse("productId", ""), w.sessionId))

    // Search traffic: each "forward" onto the search landing page counts once,
    // aggregated by (platform, channel).
    val searchTraffic = detail.filter(d => d.pageTypeCode == SEARCH_PRE.code && d.logType == "forward")
      .map(d => ((d.platformCode, d.channelCode), 1))
      .reduceByKey(_ + _)

    saveBatch(searchTraffic) { (record, timeNow) =>
      s"insert into flow_search_stat(platform_code, channel_code, amount, oper_date) values" +
        s"('${escapeSql(record._1._1)}', '${escapeSql(record._1._2)}', ${record._2}, '$timeNow')"
    }

    // Keyword analysis: clicks on the search landing page.
    val searchKey = detail.filter(d => d.pageTypeCode == SEARCH_PRE.code && d.logType == "click")

    val key = searchKey.map(d => ((d.platformCode, d.channelCode, d.keyType, d.stationId), 1))
      .reduceByKey(_ + _)

    saveBatch(key) { (record, timeNow) =>
      s"insert into flow_search_key_type(platform_code, channel_code, key_type, station_id, amount, oper_date) values" +
        s"('${escapeSql(record._1._1)}', '${escapeSql(record._1._2)}', '${escapeSql(record._1._3)}',${record._1._4}, ${record._2}, '$timeNow')"
    }

    // Keyword detail: for every grouping key, the total number of occurrences
    // plus a JSON map of value -> occurrence count.
    def countsAsJson(pairs: DStream[((String, String, String, String), String)]) =
      pairs
        .combineByKey((v: String) => List(v),
          (acc: List[String], v: String) => v :: acc,
          (a: List[String], b: List[String]) => a ::: b,
          new HashPartitioner(5))
        .map { case (groupKey, values) =>
          // values is never empty: combineByKey only emits keys that saw at
          // least one value, so summing the counts here is safe.
          val counts = values.groupBy(identity).map { case (v, occurrences) => (v, occurrences.size) }
          (groupKey, (counts.values.sum, JSONObject(counts).toString()))
        }

    val keyDetails = countsAsJson(
      searchKey.map(d => ((d.platformCode, d.channelCode, d.stationId, d.searchKey), d.keyType)))
    val keyDetailsProv = countsAsJson(
      searchKey.map(d => ((d.platformCode, d.channelCode, d.stationId, d.searchKey), d.province)))

    saveBatch(keyDetails.join(keyDetailsProv)) { (record, timeNow) =>
      s"insert into flow_search_key(platform_code, channel_code, station_id, key_word, key_type_map," +
        s"key_type_amount,prov_amount,prov_map, oper_date) values('${escapeSql(record._1._1)}', '${escapeSql(record._1._2)}', '${escapeSql(record._1._3)}', '${escapeSql(record._1._4)}', " +
        s"'${escapeSql(record._2._1._2)}', ${record._2._1._1},${record._2._2._1},'${escapeSql(record._2._2._2)}', '$timeNow')"
    }

    // Search-result analysis: "switch" clicks on the result page that returned
    // zero products.
    val searchResult = detail
      .filter(d => d.pageTypeCode == SEARCH_DETAIL.code && d.buttonCode == "switch" && d.productSum == "0")

    /**
      * mapWithState update function: accumulates a per-key count across
      * batches and resets the counter at day rollover.
      *
      * Fix: the original called state.remove() and then state.update() in the
      * same invocation; Spark's State forbids update-after-remove (it throws
      * at runtime), so the daily reset is now a plain update with the fresh
      * batch value, which has the same observable result.
      */
    def trackStateFunc(batchTime: Time, key: (String, String, String, String, String, String),
                       value: Option[Int], state: State[Long]): Option[((String, String, String, String, String, String), Long)] = {
      val increment = value.getOrElse(0).toLong
      val output =
        if (state.exists()) {
          // Keep accumulating only while the batch's day-of-month equals
          // Utils.getYesterday("dd") — getYesterday's exact semantics are not
          // visible here; confirm the intended rollover rule.
          val sameDay = Utils.timeFormat(batchTime.milliseconds, "dd").toInt ==
            Utils.getYesterday("dd").toInt
          if (sameDay) increment + state.getOption.getOrElse(0L) else increment
        } else {
          increment
        }
      state.update(output)
      Some((key, output))
    }

    val stateSpec = StateSpec.function(trackStateFunc _)
      .numPartitions(5)

    val searchResultAnalyze = searchResult
      .map(s => ((s.stationId, s.platformCode, s.channelCode, s.lab, s.searchKey, s.sessionId), 1))
      .reduceByKey(_ + _)
      .mapWithState(stateSpec)
      .stateSnapshots()

    // NOTE(review): the flow_search_result persistence was commented out
    // upstream, so these two aggregations are defined but never materialised;
    // kept so re-enabling the write is a one-line change.
    val searchResultUv = searchResultAnalyze
      .map(s => ((s._1._1, s._1._2, s._1._3, s._1._4, s._1._5), 1))
      .reduceByKey(_ + _)

    val searchResultPv = searchResultAnalyze
      .map(s => ((s._1._1, s._1._2, s._1._3, s._1._4, s._1._5), s._2))
      .reduceByKey(_ + _)

    // Filter-condition analysis: parse the filterConditions JSON payload into
    // (code, value) pairs and count, per (category lab, platform), how often
    // each pair is used.
    val searchFilter = detail
      .filter(d => d.pageTypeCode == SEARCH_DETAIL.code && d.buttonCode == "filter")
      .map { s =>
        val parsed = JSON.parseFull(s.filterConditions)
          .getOrElse(Map.empty[String, Any]).asInstanceOf[Map[String, Any]]
        val pairs = parsed.toList.flatMap {
          case (name, text: String) =>
            List(name -> text)
          case (_, items: List[_]) =>
            // Generic element types are erased at runtime, so inspect each
            // element; non-map entries are skipped instead of throwing a
            // ClassCastException inside the task.
            items.collect { case m: Map[_, _] =>
              val entry = m.asInstanceOf[Map[String, String]]
              entry.getOrElse("code", "") -> entry.getOrElse("value", "")
            }
          case (_, m: Map[_, _]) =>
            val entry = m.asInstanceOf[Map[String, String]]
            List(entry.getOrElse("code", "") -> entry.getOrElse("value", ""))
          case _ =>
            // Numbers / booleans / nulls: the original match was not
            // exhaustive, and a MatchError here would kill the Spark task.
            Nil
        }
        ((s.lab, s.platformCode), pairs)
      }
      .reduceByKey(_ ++ _)
      .map { case (groupKey, pairs) =>
        val counts = pairs.map { case (code, v) => code + "&" + v }
          .groupBy(identity).map { case (p, occurrences) => (p, occurrences.size) }
        (groupKey, JSONObject(counts).toString())
      }

    saveBatch(searchFilter) { (record, timeNow) =>
      s"insert into flow_search_filter(platform_code, category_id, filter_condition," +
        s"oper_date) values('${escapeSql(record._1._2)}', '${escapeSql(record._1._1)}', '${escapeSql(record._2)}', '$timeNow')"
    }

    // Paging usage details.
    // NOTE(review): this filter reuses buttonCode == "filter", identical to
    // the filter-condition stream above; it likely should match the paging
    // button event — confirm against the client button codes before changing.
    val paging = detail.filter(d => d.pageTypeCode == SEARCH_DETAIL.code && d.buttonCode == "filter")
      .map(s => ((s.platformCode, s.lab, s.pagination), 1))
      .reduceByKey(_ + _)

    saveBatch(paging) { (record, timeNow) =>
      s"insert into flow_search_paging(platform_code, category_code, page_num, amount," +
        s"oper_date) values('${escapeSql(record._1._1)}', '${escapeSql(record._1._2)}', '${escapeSql(record._1._3)}', '${record._2}', '$timeNow')"
    }
  }

}
