package com.lvmama.rhino.analyze.processing

import com.lvmama.rhino.common.entity._
import com.lvmama.rhino.common.utils.JDBCUtil.ConnectionPool
import com.lvmama.rhino.common.utils.Utils
import com.lvmama.rhino.common.utils.logging.LazyLogging
import org.apache.spark.HashPartitioner
import org.apache.spark.streaming.Time
import org.apache.spark.streaming.dstream.DStream

/**
  * Created by yuanxiaofeng on 2017/5/19.
  */
class WirelessV2Visitor extends Serializable with LazyLogging {
  self =>

  /** Flat projection of a [[WirelessClientMonitorLog]] record carrying only the
    * fields this analyzer needs; page parameters are pre-extracted to plain
    * strings (empty string when absent).
    */
  case class VisitorType(logType: String, pageTypeCode: String, stationId: String, platformCode: String,
                         timestamp: Long, channelCode: String, pageCode: String, province: String,
                         productId: String, sessionId: String, buttonCode: String, newVisitor: String,
                         specialSelling: String, productName: String, categoryId: String)

  /** Consumes the wireless-client monitor-log stream: per micro-batch, counts
    * new visitors grouped by (platformCode, channelCode) and persists the counts
    * into MySQL table `flow_stat_new_visitor`. A new-register aggregation is
    * computed as well but its sink is currently disabled (see commented block).
    *
    * @param ds stream of raw wireless client monitor log records
    */
  def process(ds: DStream[WirelessClientMonitorLog]): Unit = {
    import scala.util.control.NonFatal

    // Project each raw record into the narrow VisitorType shape; missing page
    // params default to "".
    val detail = ds
      .map(w =>
        VisitorType(w.logType, w.pageTypeCode, w.stationId, w.platformCode, w.timestamp,
          w.channelCode, w.pageCode, w.province, w.pageParam.getOrElse("productId", ""), w.sessionId,
          w.buttonCode, w.newVisitor, w.pageParam.getOrElse("specialSelling", ""), w.pageParam.getOrElse("productName", ""),
          w.pageParam.getOrElse("categoryId", "")))

    // Newly registered users per (platform, channel). NOTE: this DStream has no
    // output operation wired up (its sink is commented out below), so Spark will
    // not actually compute it; kept so the disabled sink can be re-enabled as-is.
    val newRegister = detail
      .filter(n => n.pageTypeCode == REGISTER.code && n.buttonCode == "register")
      .map(n => ((n.platformCode, n.channelCode), 1))
      .reduceByKey(_ + _)
//    newRegister.print()
//    newRegister.foreachRDD((rdd, time: Time) => {
//      val timeNow = Utils.timeFormat(time.milliseconds)
//      rdd.foreachPartition(p => {
//        val conn = ConnectionPool.getMysqlManager.getConnection
//        val statement = conn.createStatement()
//        try {
//          conn.setAutoCommit(false)
//          p.foreach(record => {
//            val sql = s"insert into flow_app_new_register(app_type, channel, count, oper_date) " +
//              s"values('${record._1._1}', '${record._1._2}', '${record._2}', '$timeNow')"
//            logger.info(sql)
//            statement.addBatch(sql)
//          })
//          statement.executeBatch
//          conn.commit()
//        } catch {
//          case e: Exception => e.printStackTrace()
//        } finally {
//          statement.close()
//          conn.close()
//        }
//      })
//    })

    // New visitors per (platform, channel), restricted to wap/pc terminals.
    val newVisitor = detail
      .filter(n => n.newVisitor == "true"  &&
        (n.platformCode == TerminalType.wab.toString || n.platformCode == TerminalType.pc.toString))
      .map(n => ((n.platformCode, n.channelCode), 1))
      .reduceByKey(_ + _)

    // Persist the per-batch counts. One connection + one batched insert per
    // partition; the whole partition is committed atomically.
    newVisitor.foreachRDD((rdd, time: Time) => {
      val timeNow = Utils.timeFormat(time.milliseconds)
      rdd.foreachPartition(p => {
        val conn = ConnectionPool.getMysqlManager.getConnection
        try {
          conn.setAutoCommit(false)
          // Parameterized statement instead of string-built SQL: channel/platform
          // codes originate from client logs and must not be interpolated raw.
          val statement = conn.prepareStatement(
            "insert into flow_stat_new_visitor(platform_code, channel_code, amount, oper_date) " +
              "values(?, ?, ?, ?)")
          try {
            p.foreach { case ((platform, channel), amount) =>
              logger.info(s"flow_stat_new_visitor <- platform='$platform' channel='$channel' amount=$amount oper_date='$timeNow'")
              statement.setString(1, platform)
              statement.setString(2, channel)
              statement.setInt(3, amount)
              statement.setString(4, timeNow)
              statement.addBatch()
            }
            statement.executeBatch()
            conn.commit()
          } finally {
            // Close the statement on every path; connection is handled below.
            statement.close()
          }
        } catch {
          case NonFatal(e) =>
            // Log (not printStackTrace) so executor logs capture the failure,
            // and roll back the partial batch explicitly.
            logger.error(s"failed to persist new-visitor counts for $timeNow", e)
            try conn.rollback() catch { case NonFatal(_) => () }
        } finally {
          conn.close()
        }
      })
    })

    // Product-detail analysis (sink currently disabled).
    import spray.json._
//    import DefaultJsonProtocol._
//    val detailPage = detail
//      .filter(d => d.pageTypeCode == DETAIL.code && d.logType == LogType.forward.toString)
//      .map(d => ((d.platformCode,d.stationId, d.categoryId, if (d.specialSelling == "") "false" else d.specialSelling),
//        (d.productId, d.productName)))
//      .combineByKey((v: (String, String)) => List(v), (x: List[(String, String)], y: (String, String)) => y :: x,
//        (x: List[(String, String)], y: List[(String, String)]) => x ::: y, new HashPartitioner(5))
//      .mapValues(l => (l.size,
//        l.groupBy(ll => ll).map(ll => (ll._1._1, ll._1._2, ll._2.length)).toJson.toString()))
//
//    detailPage.foreachRDD((rdd, time: Time) => {
//      val timeNow = Utils.timeFormat(time.milliseconds)
//      rdd.foreachPartition(p => {
//        val conn = ConnectionPool.getMysqlManager.getConnection
//        val statement = conn.createStatement()
//        try {
//          conn.setAutoCommit(false)
//          p.foreach(record => {
//            val sql = s"insert into flow_stat_product_details_pv(platform_code,station_id, category_id, special_sell_flag, product_json," +
//              s"pv, oper_date) values('${record._1._1}', '${record._1._2}', '${record._1._3}','${record._1._4}', '${record._2._2}', " +
//              s"'${record._2._1}','$timeNow')"
//            logger.info(sql)
//            statement.addBatch(sql)
//          })
//          statement.executeBatch
//          conn.commit()
//        } catch {
//          case e: Exception => e.printStackTrace()
//        } finally {
//          statement.close()
//          conn.close()
//        }
//      })
//    })
  }
}
