package com.lvmama.rhino.analyze.processing

import com.lvmama.rhino.common.entity._
import com.lvmama.rhino.common.utils.JDBCUtil.ConnectionPool
import com.lvmama.rhino.common.utils.Utils
import com.lvmama.rhino.common.utils.logging.LazyLogging
import org.apache.spark.HashPartitioner
import org.apache.spark.streaming.Time
import org.apache.spark.streaming.dstream.DStream

import scala.util.control.NonFatal

/**
  * Streaming job that counts wireless-client "pit" (ad slot / entrance) page
  * views per conversion step and writes one row per
  * (platform, channel, entrance code, category) into `flow_stat_pit_bit_pv`.
  *
  * Created by yuanxiaofeng on 2017/5/16.
  */
class WirelessV2PitStat extends Serializable with LazyLogging {
  self =>

  /**
    * Flat projection of one [[WirelessClientMonitorLog]] line.
    * `productId` / `categoryId` / `orderNum` / `paymentAmount` come from the
    * log's `pageParam` map and default to "" when absent, so downstream code
    * never sees null.
    */
  case class PitAccessType(logType: String, pageTypeCode: String, stationId: String, platformCode: String,
                           timestamp: Long, channelCode: String, pageCode: String, province: String,
                           productId: String, sessionId: String, categoryId: String, orderNum: String,
                           paymentAmount: String, holeCode: String, accessCode: String)

  /**
    * Wires the whole pipeline onto `ds`:
    *  1. keep only "forward" logs on conversion pages (detail, fill-order,
    *     pending/completed payment, or topic pages),
    *  2. count views per (platform, channel, entrance code + type, category)
    *     and per page type,
    *  3. persist one row per key and micro-batch into MySQL.
    *
    * Side effects only (MySQL inserts); registers a foreachRDD output action.
    */
  def process(ds: DStream[WirelessClientMonitorLog]) = {

    val pitAccessRecord = ds
      .map(w =>
        PitAccessType(w.logType, w.pageTypeCode, w.stationId, w.platformCode, w.timestamp,
          w.channelCode, w.pageCode, w.province, w.pageParam.getOrElse("productId", ""),
          w.sessionId, w.pageParam.getOrElse("categoryId", ""), w.pageParam.getOrElse("orderNum", ""),
          w.pageParam.getOrElse("paymentAmount", ""), w.holeCode, w.accessCode))

    val conversion = pitAccessRecord
      .filter(d => d.logType == LogType.forward.toString)
      .filter(d => d.pageTypeCode == DETAIL.code || d.pageTypeCode == Fill_ORDER.code ||
        d.pageTypeCode == PENDING_PAYMENT.code || d.pageTypeCode == COMPLETED_PAYMENT.code ||
        d.pageCode.contains(TOPIC.code))
      .flatMap { line =>
        // Entrance code -> code type (0 = hole code, 1 = access code).
        // Built as a Map on purpose: when holeCode equals accessCode the
        // access entry (type 1) wins, matching the historic behaviour.
        val entrances: Map[String, Int] =
          (if (line.holeCode != "kwnull") Map(line.holeCode -> 0) else Map.empty[String, Int]) ++
            (if (line.accessCode != "null") Map(line.accessCode -> 1) else Map.empty[String, Int])
        entrances.map(entrance =>
          (((line.platformCode, line.channelCode, entrance, line.categoryId), line.pageTypeCode), 1))
      }
      // Count views directly instead of collecting full page-type lists with
      // combineByKey and sizing the groups afterwards.
      .reduceByKey((a: Int, b: Int) => a + b, new HashPartitioner(5))
      .map { case ((key, pageTypeCode), count) => (key, Map(pageTypeCode -> count)) }
      // Page types are already unique per key after the reduce, so plain map
      // concatenation merges without collisions.
      .reduceByKey(_ ++ _)
      .map { case (key, counts) =>
        // Collapse every topic-like page type onto TOPIC.code, SUMMING their
        // counts. (The previous Map.map overwrote colliding TOPIC entries and
        // silently dropped views.)
        val normalized = counts.toSeq
          .map { case (code, n) => (if (code.contains(TOPIC.code)) TOPIC.code else code, n) }
          .groupBy(_._1)
          .map { case (code, hits) => code -> hits.map(_._2).sum }
        (key,
          normalized.getOrElse(TOPIC.code, 0),
          normalized.getOrElse(DETAIL.code, 0),
          normalized.getOrElse(Fill_ORDER.code, 0),
          normalized.getOrElse(PENDING_PAYMENT.code, 0),
          normalized.getOrElse(COMPLETED_PAYMENT.code, 0))
      }

    // Persist each micro-batch: one pooled connection per partition, all rows
    // batched into a single transaction.
    conversion.foreachRDD((rdd, time: Time) => {
      val operDate = Utils.timeFormat(time.milliseconds)
      rdd.foreachPartition { partition =>
        val conn = ConnectionPool.getMysqlManager.getConnection
        // Parameterized insert: immune to quoting/injection problems the old
        // string-interpolated SQL was exposed to (e.g. a ' in any code field).
        val statement = conn.prepareStatement(
          "insert into flow_stat_pit_bit_pv(platformCode,channelCode,code,code_type,category_id," +
            "topic_page_sum,detail_page_sum,fill_order_page_sum,pending_payment_page_sum," +
            "completed_payment_page_sum,oper_date) values (?,?,?,?,?,?,?,?,?,?,?)")
        try {
          conn.setAutoCommit(false)
          partition.foreach { case ((platform, channel, (code, codeType), categoryId),
                                    topicSum, detailSum, fillOrderSum, pendingSum, completedSum) =>
            statement.setString(1, platform)
            statement.setString(2, channel)
            statement.setString(3, code)
            statement.setInt(4, codeType)
            statement.setString(5, categoryId)
            statement.setInt(6, topicSum)
            statement.setInt(7, detailSum)
            statement.setInt(8, fillOrderSum)
            statement.setInt(9, pendingSum)
            statement.setInt(10, completedSum)
            statement.setString(11, operDate)
            statement.addBatch()
          }
          statement.executeBatch()
          conn.commit()
        } catch {
          case NonFatal(e) =>
            // Undo the half-written batch and keep the stream alive; the old
            // code only printed the stack trace and never rolled back.
            conn.rollback()
            logger.error("failed to persist flow_stat_pit_bit_pv batch", e)
        } finally {
          statement.close()
          conn.close()
        }
      }
    })

  }

}
