package com.ddxz.spark.read

import java.sql.DriverManager
import java.util.UUID

import com.ddxz.spark.read.constant.CommonConstData
import com.ddxz.spark.read.struct.MySqlEntity._
import com.ddxz.spark.read.struct.SparkEntity._
import com.ddxz.spark.read.tools.{CommonTools, HDFSTools}
import com.ddxz.spark.read.constant.CommonConstData._
import com.ddxz.spark.read.struct.OtherEntity.{BaseEventEntity, ColumnEntity}
import org.apache.commons.lang3.StringUtils
import org.apache.spark.SparkContext
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.rdd.RDD
import org.apache.spark.serializer.KryoSerializer
import org.apache.spark.sql.SparkSession

import scala.collection.mutable.ArrayBuffer


object MySql {
  /**
   * Entry point. Builds a local-mode Spark session, broadcasts the facility
   * lookup maps, runs the bot/trojan pipeline for a fixed date and exports the
   * result to HDFS and local disk.
   */
  def main(args: Array[String]): Unit = {
    val session = SparkSession.builder()
      .appName("ReadMysql")
      .master("local")
      .config("spark.sql.parquet.binaryAsString", "true")
      .config("spark.serializer", classOf[KryoSerializer].getName)
      .getOrCreate()
    val context = session.sparkContext
    val (ipBroadcast, domainBroadcast) = beforeCalculate(context, session)
    clearHistoryData()
    // Bot/trojan ("僵尸木马") event processing.
    val fixedRdd = botortrojan(context, session, ipBroadcast, domainBroadcast, "")
    saveFile("/apps/hive/warehouse", "/data", "cncert_botortrojan_event", "20210801", fixedRdd)
    session.stop()
  }

  /**
   * Writes the RDD as a single text file under hdfsPath/name/dataDate
   * (replacing any previous output), then copies the part file to
   * localPath/name/name_dataDate.txt if it exists.
   */
  def saveFile(hdfsPath: String, localPath: String, name: String, dataDate: String, fixedRdd: RDD[String]): Unit = {
    val targetDir = s"$hdfsPath/$name/$dataDate"
    val partFile = s"$targetDir/part-00000"
    val localFile = s"$localPath/$name/${name}_$dataDate.txt"
    HDFSTools.deletefile(targetDir)
    // coalesce(1) forces a single part file so the local copy below is complete.
    fixedRdd.coalesce(1).saveAsTextFile(targetDir)
    if (HDFSTools.fileExists(partFile)) {
      HDFSTools.getfile(partFile, localFile)
    }
  }

  /** Pre-computation step: currently just broadcasts the ip/domain facility maps. */
  def beforeCalculate(sc: SparkContext, session: SparkSession): (Broadcast[scala.collection.Map[String, Iterable[IpFacilityEntity]]], Broadcast[scala.collection.Map[String, Iterable[DomainFacilityEntity]]]) =
    broadcastIpDomain(sc, session)

  /** Clears the counter tables for the empty working date before a run. */
  def clearHistoryData(): Unit =
    Seq("base_event_count", "base_infrastructure_event_count")
      .foreach(clearHistoryData(_, ""))

  /**
   * Daily bot/trojan ("僵尸木马") event pipeline:
   *  1. reads the day's rows from the Hive table cncert_botortrojan_event,
   *  2. matches source/target IPs against the broadcast facility map and batch-inserts
   *     the enriched rows into MySQL (cncert_botortrojan_event_back),
   *  3. writes per-area, national-total and infrastructure counters,
   *  4. rebuilds the grouped table from the back table.
   *
   * @param ipList     broadcast map: ip -> facilities owning that ip
   * @param domainList broadcast map: domain -> facilities (not used by this pipeline;
   *                   kept for signature compatibility — TODO confirm it can be dropped)
   * @param dataDate   partition day being processed
   * @return the enriched event lines that were persisted to the back table
   */
  def botortrojan(sc: SparkContext, session: SparkSession, ipList: Broadcast[scala.collection.Map[String, Iterable[IpFacilityEntity]]], domainList: Broadcast[scala.collection.Map[String, Iterable[DomainFacilityEntity]]], dataDate: String): RDD[String] = {
    // Hive columns aliased to the camelCase fields of BotortrojanEntity.
    val hiveFields = "source_type AS `sourceType`, source AS `source`, detail_type_code AS `detailTypeCode`, " +
      "basic_type AS `basicType`, sub_type AS `subType`, detail_type AS `detailType`, ext_type AS `extType`, " +
      "time_all AS `timeAll`, hour AS `hour`, s_ip AS `sIp`, s_ip_n AS `sIpN`, s_port AS `sPort`, " +
      "d_ip AS `dIp`, d_ip_n AS `dIpN`, d_port AS `dPort`, control_relation AS `controlRelation`, data_return AS `dataReturn`, " +
      "s_ip_belong AS `sIpBelong`, s_ip_area AS `sIpArea`, s_ip_operator AS `sIpOperator`, d_ip_belong AS `dIpBelong`," +
      "d_ip_area AS `dIpArea`, d_ip_operator AS `dIpOperator`, s_ip_lng AS `sIpLng`, s_ip_lat AS `sIpLat`, " +
      "d_ip_lng AS `dIpLng`, d_ip_lat AS `dIpLat`, m_position_type AS `mPositionType`, m_area AS `mArea`, " +
      "m_operator AS `mOperator`, m_p_gp AS `mPGp`, m_stringernet AS `mStringernet`, m_mark AS `mMark`"
    val mysqlField = "`source_type`,`source`,`detail_type_code`,`basic_type`,`sub_type`,`detail_type`,`ext_type`,`time_all`," +
      "`eventdate`,`hour`,`s_ip`,`s_ip_n`,`s_port`,`d_ip`,`d_ip_n`,`d_port`,`control_relation`,`data_return`,`s_ip_belong`," +
      "`s_ip_area`,`s_ip_operator`,`d_ip_belong`,`d_ip_area`,`d_ip_operator`,`s_ip_lng`,`s_ip_lat`,`d_ip_lng`,`d_ip_lat`," +
      "`m_position_type`,`m_area`,`m_operator`,`m_p_gp`,`m_stringernet`,`m_mark`,`facility_id`,`dept_id`,`hit_type`,`hit_ip`," +
      "`base_type_back`,`sub_type_back`,`uuid`,`eventdate_part`,`back1`,`back2`,`back3`,`back4`,`back5`,`back6`,`back7`,`back8`,`back9`,`back10`"
    val table = "cncert_botortrojan_event"
    // FIX: the filter column was misspelled "eventdata" — every other query in this file
    // filters on "eventdate". Also dropped the trailing ';', which spark.sql() rejects.
    val query = "SELECT " + hiveFields + " FROM " + table + " WHERE eventdate = '" + dataDate + "'"
    import session.implicits._
    // Cached: three separate actions (foreachPartition, two counts/aggregations) reuse it.
    val originRdd = session.sql(query)
      .as[BotortrojanEntity]
      .rdd
      .cache()
    // Enrich each event with every facility hit on either endpoint, then serialize
    // to the hiveSplit-delimited line expected by the back table.
    val fixedRdd = originRdd
      .map(vo => EventDataEntity(vo.sIp, vo.dIp, vo.toString, "僵尸木马" + hiveSplit + vo.subType))
      .map(vo => bothIpMatch(ipList.value, vo))
      .filter(_.nonEmpty)
      .flatMap(vo => vo)
      .map(vo => vo.data + hiveSplit + vo.flt + hiveSplit + vo.remark + hiveSplit + UUID.randomUUID() + hiveSplit + dataDate + backField)
      .cache()
    clearHistoryData(table + "_back", dataDate)
    fixedRdd.repartition(3).foreachPartition(vo => saveMysql(table, mysqlField, vo))
    // Per-area counts: a domestic ("境内") source and a domestic target each count once.
    val eventNum = originRdd.filter(_.sIpBelong == "境内")
      .map(vo => (vo.sIpArea, 1))
      .union(originRdd.filter(_.dIpBelong == "境内").map(vo => (vo.dIpArea, 1)))
      .reduceByKey(_ + _)
      .map(vo => "僵尸木马" + hiveSplit + vo._1 + hiveSplit + vo._2 + hiveSplit + dataDate)
    val baseEventTable = "base_event_count"
    val baseEventField = "event_type,country,eventcount,eventdate"
    eventNum.repartition(1).foreachPartition(vo => saveMysql(baseEventTable, baseEventField, vo))
    // National total plus the infrastructure-hit counter.
    val totalNum = originRdd.count()
    saveMysql(baseEventTable, baseEventField, Iterator("僵尸木马" + hiveSplit + "中国" + hiveSplit + totalNum + hiveSplit + dataDate))
    val fixedNum = fixedRdd.count()
    saveMysql("base_infrastructure_event_count", "event_type,eventcount,baseeventcount,eventdate", Iterator("僵尸木马" + hiveSplit + totalNum + hiveSplit + fixedNum + hiveSplit + dataDate))
    // Rebuild the grouped table for this date from the freshly written back table.
    clearHistoryData("cncert_botortrojan_event_group", dataDate)
    mergeMysqlGroup("cncert_botortrojan_event_back", "cncert_botortrojan_event_group",
      "eventdate,facility_id,dept_id,base_type_back,detail_type,s_ip,d_ip,hit_ip,hit_type", "event_count", dataDate)
    fixedRdd
  }

  /**
   * Aggregates the back table into the group table: inserts one row per distinct
   * `columns` combination for the given date, with count(1) stored in `sumCol`.
   *
   * FIX: the connection is now closed even if prepareStatement throws, and the
   * date is bound as a parameter instead of being concatenated into the SQL.
   * Table/column names still come from trusted call sites — they cannot be bound.
   */
  def mergeMysqlGroup(backTable: String, groupTable: String, columns: String, sumCol: String, dataDate: String): Unit = {
    val conn = DriverManager.getConnection(url, user, password)
    try {
      conn.setAutoCommit(false)
      val dml = "INSERT INTO " + groupTable + "(" + columns + "," + sumCol + ") SELECT " + columns + ",count(1) FROM " + backTable + " WHERE eventdate = ? GROUP BY " + columns
      val stmt = conn.prepareStatement(dml)
      try {
        stmt.setString(1, dataDate)
        stmt.executeUpdate()
        conn.commit()
      } finally {
        stmt.close()
      }
    } finally {
      conn.close()
    }
  }

  /**
   * Builds one PostEventDataEntity per facility whose IP list contains the event's
   * source IP; if the source does not match, falls back to the target IP.
   * (Source match deliberately short-circuits the target — original behavior kept.)
   *
   * The flt string is "facilityId|deptId|hitType|hitIp" joined with hiveSplit,
   * where hitType is always "0" and hitIp is "1" (source hit) or "2" (target hit).
   *
   * FIX: replaced the `.getOrElse(null)` sentinels with Option handling.
   */
  def bothIpMatch(ipList: scala.collection.Map[String, Iterable[IpFacilityEntity]], data: EventDataEntity): Array[PostEventDataEntity] = {
    def toEntities(facilities: Iterable[IpFacilityEntity], hitIp: String): Array[PostEventDataEntity] =
      facilities.map(ip =>
        PostEventDataEntity(ip.facilityId + hiveSplit + ip.deptId + hiveSplit + "0" + hiveSplit + hitIp, data.data, data.remark)
      ).toArray

    ipList.get(data.srcIp) match {
      case Some(facilities) => toEntities(facilities, "1")
      case None             => ipList.get(data.tarIp).map(toEntities(_, "2")).getOrElse(Array.empty)
    }
  }

  /**
   * Batch-inserts hiveSplit-delimited rows into the given MySQL table, flushing
   * and committing every 5000 rows and once more at the end.
   *
   * FIXES:
   *  - the index loop was `0 to values.size`, reading one element past the end of
   *    the array (ArrayIndexOutOfBoundsException on every row) — now `values.indices`;
   *  - `split` now uses limit -1 so trailing empty fields are kept and the value
   *    count stays aligned with the placeholder count;
   *  - statement and connection are closed even when an early call throws.
   * Blank values are stored as "" (same as the original isNoneBlank branch).
   */
  def saveMysql(table: String, columns: String, data: Iterator[String]): Unit = {
    val conn = DriverManager.getConnection(url, user, password)
    try {
      conn.setAutoCommit(false)
      val columnEntityList: ColumnEntity = getColumnEntityList(columns)
      val query = "INSERT INTO " + table + "(" + columnEntityList.name + ") VALUES (" + columnEntityList.index + ")"
      val stmt = conn.prepareStatement(query)
      try {
        var pending = 0
        data.foreach { row =>
          val values: Array[String] = row.split(hiveSplit, -1)
          for (idx <- values.indices) {
            val value = values(idx)
            stmt.setString(idx + 1, if (value != null && value.trim.nonEmpty) value else "")
          }
          stmt.addBatch()
          pending += 1
          if (pending % 5000 == 0) {
            stmt.executeBatch()
            conn.commit()
            pending = 0
          }
        }
        // Flush whatever is left in the final partial batch.
        stmt.executeBatch()
        conn.commit()
      } finally {
        stmt.close()
      }
    } finally {
      conn.close()
    }
  }

  /**
   * Splits a comma-separated column list into a ColumnEntity holding the column
   * names (first whitespace-delimited token of each entry, aliases dropped) and a
   * matching comma-separated run of "?" placeholders.
   */
  def getColumnEntityList(str: String): ColumnEntity = {
    val names = str.split(",").map(_.trim.split(" ")(0))
    val placeholders = Array.fill(names.length)("?")
    ColumnEntity(names.mkString(","), placeholders.mkString(","))
  }

  /**
   * Deletes all rows of the given table for the given date.
   *
   * FIXES:
   *  - the DML was missing a space before "where", producing broken SQL like
   *    "DELETE FROM base_event_countwhere eventdate = ..." on every call;
   *  - the date is bound as a parameter instead of concatenated;
   *  - executeUpdate is inside try, and the pointless executeBatch on a plain
   *    Statement is gone; connection/statement close even on failure.
   */
  def clearHistoryData(tableName: String, dataDate: String): Unit = {
    val conn = DriverManager.getConnection(url, user, password)
    try {
      conn.setAutoCommit(false)
      val stmt = conn.prepareStatement("DELETE FROM " + tableName + " WHERE eventdate = ?")
      try {
        stmt.setString(1, dataDate)
        stmt.executeUpdate()
        conn.commit()
      } finally {
        stmt.close()
      }
    } finally {
      conn.close()
    }
  }

  /**
   * Loads the critical-infrastructure facility table over JDBC, expands each row's
   * IP ranges via CommonTools.parseIpRange, and broadcasts two lookup maps:
   * ip -> facilities and domain -> facilities.
   */
  def broadcastIpDomain(context: SparkContext, session: SparkSession):
  (Broadcast[scala.collection.Map[String, Iterable[IpFacilityEntity]]], Broadcast[scala.collection.Map[String, Iterable[DomainFacilityEntity]]]) = {
    val facilityQuery = "(select  ips,domain,id as `facilityId`, dept_id as `deptId` from critical_infrastructure_info) as base"
    import session.implicits._
    // One FacilityEntity per concrete IP after range expansion.
    val facilityRdd = session.read.format("jdbc")
      .option("url", CommonConstData.url)
      .option("user", CommonConstData.user)
      .option("password", CommonConstData.password)
      .option("driver", CommonConstData.driver)
      .option("dbtable", facilityQuery)
      .load()
      .as[FacilityEntity]
      .rdd
      .flatMap(vo => CommonTools.parseIpRange(vo))

    val ipGroups = facilityRdd
      .filter(_.ips != "")
      .map(f => (f.ips, IpFacilityEntity(f.ips, f.facilityId, f.deptId)))
      .groupByKey
    val domainGroups = facilityRdd
      .filter(_.domain != "")
      .map(f => (f.domain, DomainFacilityEntity(f.domain, f.facilityId, f.deptId)))
      .groupByKey

    // NOTE(review): debug dumps to a developer home directory — saveAsTextFile
    // throws if these paths already exist; confirm they are still wanted.
    ipGroups.coalesce(1).saveAsTextFile("/home/zzg/iplist.txt")
    domainGroups.coalesce(1).saveAsTextFile("/home/zzg/domainlist.txt")

    (context.broadcast(ipGroups.collectAsMap()), context.broadcast(domainGroups.collectAsMap()))
  }
}