package com.etc.log

import java.sql.{Connection, PreparedStatement}

import com.ggstar.util.ip.IpHelper
import org.apache.spark.sql.SparkSession

import scala.collection.mutable.ListBuffer
import scala.util.control.NonFatal

/**
  * One cleaned access-log record.
  *
  * @param url        the raw request URL column (surrounding quotes replaced by spaces)
  * @param types      resource type segment extracted from the URL path
  * @param id         resource id segment extracted from the URL path
  * @param regionByIp region resolved from the client IP
  * @param ip         client IP address
  * @param dates      normalized request timestamp
  */
final case class CleanLog(url: String, types: String, id: String, regionByIp: String, ip: String, dates: String)

/**
  * Log cleansing: parses raw access-log lines and persists the cleaned records to MySQL.
  */
object Log {

  // NOTE(review): shared mutable JDBC handles on a singleton object make
  // saveCleanLog non-thread-safe; kept as public vars only for interface
  // compatibility. Prefer method-local handles.
  var con: Connection = null
  var pstm: PreparedStatement = null

  /**
    * Persists a batch of cleaned log records into the `cleanlog` table.
    *
    * Accumulates all rows with `addBatch` and issues a single `executeBatch`
    * plus one commit for the whole buffer, rolling the transaction back on
    * failure. (The previous version called `executeUpdate()` and `commit()`
    * once per row, which made `addBatch()` useless and left an unexecuted
    * batch on the statement.)
    *
    * @param logs records to insert; an empty buffer results in an empty batch
    */
  def saveCleanLog(logs: ListBuffer[CleanLog]): Unit = {
    try {
      con = MySQLUtils.getConnection()
      // Manual commit so the whole partition is written atomically.
      con.setAutoCommit(false)
      pstm = con.prepareStatement(
        "insert into cleanlog(url,types,id,regionByIp,ip,dates) values(?,?,?,?,?,?)")
      for (log <- logs) {
        pstm.setString(1, log.url)
        pstm.setString(2, log.types)
        pstm.setString(3, log.id)
        pstm.setString(4, log.regionByIp)
        pstm.setString(5, log.ip)
        pstm.setString(6, log.dates)
        // Queue this parameter set; executed once for the whole buffer below.
        pstm.addBatch()
      }
      pstm.executeBatch()
      con.commit()
    } catch {
      // NonFatal lets OutOfMemoryError / InterruptedException etc. propagate.
      case NonFatal(e) =>
        e.printStackTrace()
        // Undo the partially-written transaction; best-effort only.
        if (con != null) {
          try con.rollback() catch { case NonFatal(_) => () }
        }
    } finally {
      MySQLUtils.release(con, pstm)
    }
  }

  /**
    * Entry point: reads raw access-log lines, drops malformed records,
    * extracts (url, types, id, region, ip, date) fields and writes each
    * partition to MySQL via [[saveCleanLog]].
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("log").master("local").getOrCreate()
    val lines = spark.read.textFile("D:\\flume\\mooc_access.log")
    import spark.implicits._

    lines
      .filter(line => {
        val fields = line.split(" ")
        // Keep only records with enough columns and a well-formed URL column.
        fields.size >= 25 && fields(11).length >= 5 && fields(11).split("/").size >= 6
      })
      .map(line => {
        // Split once instead of re-splitting the line for every field.
        val fields = line.split(" ")
        // Normalize the two raw timestamp columns into one date string.
        val dates = DateUtils.parse(fields(3) + fields(4))
        // Replace the surrounding double quotes of the URL column with spaces.
        val url = fields(11).replace('"', ' ')
        val urlParts = fields(11).split("/")
        val types = urlParts(3)
        val id = urlParts(4)
        val ip = fields(0)
        // Resolve the client IP to a region name.
        val regionByIp = IpHelper.findRegionByIp(ip)
        (url, types, id, regionByIp, ip, dates)
      })
      .toDF("url", "types", "id", "regionByIp", "ip", "dates")
      .foreachPartition(partition => {
        // Collect this partition's rows, then write them to MySQL in one batch.
        val buffer = new ListBuffer[CleanLog]
        partition.foreach(row => {
          buffer.append(CleanLog(
            row.getAs[String]("url"),
            row.getAs[String]("types"),
            row.getAs[String]("id"),
            row.getAs[String]("regionByIp"),
            row.getAs[String]("ip"),
            row.getAs[String]("dates")))
        })
        saveCleanLog(buffer)
      })
    spark.close()
  }
}

