package com.zhao.streaming

import com.zhao.commonUtil.{ConnectionManager, JsonParser}
import com.zhao.util.SparkUtil
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.log4j.{Level, Logger}
import org.apache.spark.streaming.dstream.DStream

import java.sql.{Connection, PreparedStatement}
import scala.util.control.NonFatal

/**
 * Description: 当日新增用户|总用户|当日实名|总实名<br/>
 * Copyright (c) ，2021 ， 赵 <br/>
 * This program is protected by copyright laws. <br/>
 * Date： 2021/1/11 9:48
 *
 * @author 柒柒
 * @version : 1.0
 */

object NewAndCertUser {
  /**
   * Entry point: wires the Kafka CDC stream into two per-batch counters
   * (new registrations and newly certified users) and blocks until the
   * streaming context terminates.
   *
   * @param args <BatchInterval> <WindowDuration> <SlideDuration>, forwarded
   *             to SparkUtil.createKafkaStream
   */
  def main(args: Array[String]): Unit = {
    // Fail fast when the three required duration arguments are missing.
    if (args.length < 3) {
      System.err.println("Usage: NewAndCertUser <BatchInterval> <WindowDuration> <SlideDuration>")
      System.exit(1)
    }
    // Keep the driver log quiet; only errors are interesting here.
    Logger.getRootLogger.setLevel(Level.ERROR)

    // Build the Kafka-backed DStream together with its streaming context.
    val (kafkaStream, streamingContext) = SparkUtil.createKafkaStream(args)

    // Count newly registered users per batch and upsert into MySQL.
    persistNewUser(newStreamParser(kafkaStream))

    // Count newly certified users per batch and upsert into MySQL.
    persistCertUser(certStreamParser(kafkaStream))

    streamingContext.start()
    streamingContext.awaitTermination()
  }

  /**
   * 实名用户持久化
   * @param dstream
   */
  /**
   * Persists per-partition certified-user counts into MySQL.
   *
   * For each RDD partition, batches one upsert row per count value into
   * fz_bigscreen_cert_user_info; on duplicate key the stored user_count is
   * incremented by the new value.
   *
   * Fixes vs. previous version:
   *  - only catches NonFatal errors (OutOfMemoryError etc. now propagate);
   *  - rollback is guarded so a failed getConnnection no longer NPEs;
   *  - empty partitions skip connection setup entirely.
   *
   * @param dstream per-batch certified-user counts (one Int per batch)
   */
  def persistCertUser(dstream: DStream[Int]) = {
    dstream.foreachRDD(rdd => {
      rdd.foreachPartition(iter => {
        // Don't open a DB connection for an empty partition.
        if (iter.hasNext) {
          val now = System.currentTimeMillis()
          var conn: Connection = null
          var pstmt: PreparedStatement = null
          try {
            conn = ConnectionManager.getConnnection
            conn.setAutoCommit(false)
            val sql =
              """
                |insert into fz_bigscreen_cert_user_info(stat_date,stat_type,user_count,update_time)
                |values(?,?,?,?)
                |on duplicate key update stat_date=values(stat_date),stat_type=values(stat_type),
                |user_count=values(user_count) + user_count,update_time=values(update_time)
                |""".stripMargin

            pstmt = conn.prepareStatement(sql)
            while (iter.hasNext) {
              val user_count = iter.next()
              pstmt.setDate(1, new java.sql.Date(now))
              pstmt.setString(2, "stream")
              pstmt.setInt(3, user_count)
              pstmt.setTimestamp(4, new java.sql.Timestamp(now))
              pstmt.addBatch()
            }
            pstmt.executeBatch()
            conn.commit()
          } catch {
            // NonFatal: let fatal JVM errors (OOM, thread death) propagate.
            case NonFatal(e) =>
              e.printStackTrace()
              // conn is null when getConnnection itself failed.
              if (conn != null) conn.rollback()
          } finally {
            ConnectionManager.closeAll(null, pstmt, conn)
          }
        }
      })
    })
  }

  /**
   * 实名用户解析持久化
   * @param dstream
   * @return
   */
  /**
   * Filters the CDC stream down to certification events on
   * systoon_user.np_user_info and reduces each batch to a single count.
   *
   * A record counts when either:
   *  - an UPDATE moved cert_level away from "L1", or
   *  - an INSERT arrived with a cert_level other than "L1".
   *
   * @param dstream raw Kafka CDC records
   * @return per-batch count of certification events
   */
  def certStreamParser(dstream: DStream[ConsumerRecord[String, String]]) = {
    val relevantChanges = dstream.filter { record =>
      val payload = record.value()
      val parser = new JsonParser(payload)
      val hasEnvelope = payload.contains("Event") && payload.contains("Schema") &&
        payload.contains("Table") && payload.contains("Before") && payload.contains("After")
      if (hasEnvelope) {
        val table = parser.getJsnField("Table")
        val event = parser.getJsnField("Event")
        val schema = parser.getJsnField("Schema")
        "np_user_info".equals(table) && "systoon_user".equals(schema) &&
          ("UPDATE".equals(event) || "INSERT".equals(event))
      } else {
        false
      }
    }
    val certEvents = relevantChanges.filter { record =>
      val payload = record.value()
      val event = JsonParser.getJsnField(payload, "Event")
      val levelBefore = JsonParser.getJsnColFieldBefore(payload, "cert_level")
      val levelAfter = JsonParser.getJsnColField(payload, "cert_level")
      // UPDATE leaving "L1", or INSERT arriving already above "L1".
      val certifiedByUpdate = "UPDATE".equals(event) && "L1".equals(levelBefore) && !"L1".equals(levelAfter)
      val certifiedByInsert = "INSERT".equals(event) && !"L1".equals(levelAfter)
      certifiedByUpdate || certifiedByInsert
    }
    certEvents.map(_ => 1).reduce(_ + _)
  }

  /**
   * 新增用户持久化到数据库
   * @param dstream
   */
  /**
   * Persists per-partition new-user counts into MySQL.
   *
   * For each RDD partition, batches one upsert row per count value into
   * fz_bigscreen_regist_user_info; on duplicate key the stored user_count is
   * incremented by the new value.
   *
   * Fixes vs. previous version:
   *  - only catches NonFatal errors (OutOfMemoryError etc. now propagate);
   *  - rollback is guarded so a failed getConnnection no longer NPEs;
   *  - empty partitions skip connection setup entirely.
   *
   * @param dstream per-batch new-user counts (one Int per batch)
   */
  def persistNewUser(dstream: DStream[Int]) = {
    dstream.foreachRDD(rdd => {
      rdd.foreachPartition(iter => {
        // Don't open a DB connection for an empty partition.
        if (iter.hasNext) {
          val now = System.currentTimeMillis()
          var conn: Connection = null
          var pstmt: PreparedStatement = null
          try {
            conn = ConnectionManager.getConnnection
            conn.setAutoCommit(false)
            val sql =
              """
                |insert into fz_bigscreen_regist_user_info(stat_date,stat_type,user_count,update_time)
                |values(?,?,?,?)
                |on duplicate key update stat_date=values(stat_date),stat_type=values(stat_type),
                |user_count=values(user_count) + user_count,update_time=values(update_time)
                |""".stripMargin

            pstmt = conn.prepareStatement(sql)
            while (iter.hasNext) {
              val user_count = iter.next()
              pstmt.setDate(1, new java.sql.Date(now))
              pstmt.setString(2, "stream")
              pstmt.setInt(3, user_count)
              pstmt.setTimestamp(4, new java.sql.Timestamp(now))
              pstmt.addBatch()
            }
            pstmt.executeBatch()
            conn.commit()
          } catch {
            // NonFatal: let fatal JVM errors (OOM, thread death) propagate.
            case NonFatal(e) =>
              e.printStackTrace()
              // conn is null when getConnnection itself failed.
              if (conn != null) conn.rollback()
          } finally {
            ConnectionManager.closeAll(null, pstmt, conn)
          }
        }
      })
    })
  }

  /**
   * 新增用户解析
   * @param dstream
   * @return
   */
  /**
   * Filters the CDC stream down to INSERT events on
   * systoon_user.np_user_info (new registrations) and reduces each batch
   * to a single count.
   *
   * @param dstream raw Kafka CDC records
   * @return per-batch count of newly registered users
   */
  def newStreamParser(dstream: DStream[ConsumerRecord[String, String]]) = {
    val registrations: DStream[ConsumerRecord[String, String]] = dstream.filter { record =>
      val payload: String = record.value()
      val parser = new JsonParser(payload)
      val hasEnvelope = payload.contains("Event") && payload.contains("Schema") && payload.contains("Table")
      if (hasEnvelope) {
        val table = parser.getJsnField("Table")
        val event = parser.getJsnField("Event")
        val schema = parser.getJsnField("Schema")
        "np_user_info".equals(table) && "INSERT".equals(event) && "systoon_user".equals(schema)
      } else {
        false
      }
    }
    registrations.map(_ => 1).reduce(_ + _)
  }
}











