package com.gitee.etl.process

import java.io.File

import com.alibaba.fastjson.JSON
import com.alibaba.fastjson.serializer.SerializerFeature
import com.gitee.etl.bean.{LogBean, WideLog}
import com.gitee.etl.process.bean.MQETL
import com.gitee.utils.GlobalConfigUtil
import com.gitee.utils.ip.IPSeeker
import nl.basjes.parse.httpdlog.HttpdLoglineParser
import org.apache.flink.api.common.functions.RichMapFunction
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment, _}

class LogDataEtl(envs: StreamExecutionEnvironment) extends MQETL(envs) {

  /**
    * ETL entry point: reads raw access-log strings from the ODS Kafka topic,
    * enriches them, and writes the result as JSON to the DWD Kafka topic.
    *
    * Pipeline:
    *   1. Parse each raw String line into a LogBean.
    *   2. Widen each LogBean into a WideLog (province/city resolved from the IP).
    *   3. Serialize each WideLog to a JSON string.
    *   4. Sink the JSON into the DWD Kafka topic.
    */
  override def process(): Unit = {
    // Source: raw log lines from the ODS topic.
    val logStringData: DataStream[String] = getKafkaDataStream(GlobalConfigUtil.`input.topic.ods_log`)

    // 1. Parse String -> LogBean.
    val logBeanData: DataStream[LogBean] = etl(logStringData)

    // 2. Widen LogBean -> WideLog, resolving province/city from the client IP.
    val wideLogData: DataStream[WideLog] = logBeanData.map(new RichMapFunction[LogBean, WideLog] {
      // Both fields are assigned in open(); the RichMapFunction lifecycle runs
      // open() before any map() call on this task.
      var iPSeeker: IPSeeker = _
      var file: File = _

      // Load the IP database file from Flink's distributed cache (registered under "ip").
      override def open(parameters: Configuration): Unit = {
        file = getRuntimeContext.getDistributedCache.getFile("ip")
        iPSeeker = new IPSeeker(file)
      }

      override def close(): Unit = {
        // FIX: guard against open() having failed before `file` was assigned;
        // the original dereferenced `file` unconditionally and could NPE here,
        // masking the real failure.
        if (file != null) {
          file.deleteOnExit()
        }
      }

      /**
        * Split a Chinese region name into (province, city).
        *
        * Tries the "省" (province) delimiter first, then "市" (municipality),
        * then "区" (autonomous region). Names containing none of the delimiters
        * (e.g. "局域网" or "IANA" for non-geolocated IPs) fall through to
        * (raw, "") instead of crashing.
        */
      private def splitRegion(country: String): (String, String) = {
        val byProvince = country.split("省")
        if (byProvince.length > 1) {
          // Regular province, e.g. "XX省YY市".
          (byProvince(0) + "省", byProvince(1))
        } else {
          val byCity = byProvince(0).split("市")
          if (byCity.length > 1) {
            // Municipality directly under the central government, e.g. "北京市海淀区".
            (byCity(0) + "市", byCity(1))
          } else {
            val byRegion = byCity(0).split("区")
            if (byRegion.length > 1) {
              // Autonomous region.
              (byRegion(0) + "区", byRegion(1))
            } else {
              // FIX: the original unconditionally read byRegion(1) here and threw
              // ArrayIndexOutOfBoundsException for names without any delimiter
              // (String.split drops trailing empty strings, so the array can
              // have length 1). Degrade gracefully instead of killing the task.
              (byRegion.headOption.getOrElse(""), "")
            }
          }
        }
      }

      override def map(logBean: LogBean): WideLog = {
        // Resolve the region name from the client IP. Wrap in Option in case
        // IPSeeker returns null for an unknown address — treat it as empty.
        val country: String = Option(iPSeeker.getCountry(logBean.getIp)).getOrElse("")

        val (province, city) = splitRegion(country)

        new WideLog(
          logBean.getConnectionClientUser,
          logBean.getIp,
          logBean.getRequestTime,
          logBean.getMethod,
          logBean.getResolution,
          logBean.getRequestProtocol,
          logBean.getRequestStatus,
          logBean.getResponseBodyBytes,
          logBean.getReferer,
          logBean.getUserAgent,
          logBean.getReferDomain,
          province,
          city,
          "时间没有"
        )
      }
    })

    // 3. WideLog -> JSON string. DisableCircularReferenceDetect keeps fastjson
    // from emitting "$ref" placeholders in the serialized output.
    val logJson: DataStream[String] = wideLogData.map(log => JSON.toJSONString(log, SerializerFeature.DisableCircularReferenceDetect))

    // 4. Sink to the DWD Kafka topic.
    logJson.addSink(kafkaProducer(GlobalConfigUtil.`output.topic.dwd_log`))
  }

  /**
    * Parse raw access-log lines into LogBean objects.
    *
    * The parser is created in open() so each parallel task instance builds its
    * own parser instead of sharing one through the closure.
    *
    * @param logStringData stream of raw log lines
    * @return stream of parsed LogBean records
    */
  def etl(logStringData: DataStream[String]): DataStream[LogBean] = {
    logStringData.map(new RichMapFunction[String, LogBean] {
      // Assigned in open(); one parser per task instance.
      var parser: HttpdLoglineParser[LogBean] = _

      // Create the log-line parser once per task.
      override def open(parameters: Configuration): Unit = {
        parser = LogBean.createLogParse()
      }

      override def close(): Unit = {
        // Nothing to release: no external resources are held here.
      }

      // Parse one raw line into a LogBean.
      override def map(logline: String): LogBean = {
        LogBean(parser, logline)
      }
    })
  }
}
