package com.bigdata.flink.producer

import java.sql.Date
import java.text.SimpleDateFormat
import java.time.Duration

import com.bigdata.flink.conf.KafkaProperties
import org.apache.flink.api.common.eventtime.{SerializableTimestampAssigner, WatermarkStrategy}
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.api.java.tuple.Tuple
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.functions.co.CoFlatMapFunction
import org.apache.flink.streaming.api.scala.function.WindowFunction
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.api.windowing.windows.TimeWindow
import org.apache.flink.util.Collector
import org.slf4j.LoggerFactory

import scala.collection.mutable

/**
 * Flink streaming job: reads tab-separated access-log lines from Kafka,
 * keeps only error-level ("E") records with a parseable timestamp, and
 * enriches each record's domain with a user id streamed from MySQL via a
 * connected stream.
 *
 * Expected log line layout (tab-separated; only these indices are read):
 *   index 2 = level, index 3 = timestamp "yyyy-MM-dd HH:mm:ss",
 *   index 5 = domain, index 6 = traffic (long)
 */
object LogAnalysisScala {

  // In production prefer a real logger over println for diagnostics.
  val logger = LoggerFactory.getLogger("LogAnalysis1")

  def main(args: Array[String]): Unit = {

    val senv = StreamExecutionEnvironment.getExecutionEnvironment

    // Process records by event time (timestamps embedded in the log lines).
    senv.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)

    val topic = KafkaProperties.getTopic

    val properties = KafkaProperties.getKafkaProperties

    val consumer = new FlinkKafkaConsumer[String](topic,
                                  new SimpleStringSchema(),
                                  properties)
    // Ingest raw log lines from Kafka.
    val data = senv.addSource(consumer)

    val logData = data.map(x => {
      val splits = x.split("\t")
      val level = splits(2)
      val timeStr = splits(3)
      // 0L marks a failed timestamp parse; such records are filtered below.
      var time = 0L
      try {
        // SimpleDateFormat is not thread-safe, so one is created per record
        // rather than shared across parallel subtasks.
        val sourceFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
        time = sourceFormat.parse(timeStr).getTime
      } catch {
        // Only parse() can throw inside this try; log the Throwable itself
        // (not just its message) so the stack trace is preserved. The old
        // two-arg call with getMessage had no {} placeholder, so SLF4J
        // silently dropped the detail.
        case e: java.text.ParseException =>
          logger.error(s"time parse error: $timeStr", e)
      }
      val domain = splits(5)
      val traffic = splits(6).toLong
      (level, time, domain, traffic)
    }).filter(_._2 != 0)      // drop records whose timestamp failed to parse
      .filter(_._1 == "E")    // keep only error-level records
      .map(x => {
        (x._2, x._3, x._4)    // discard level: (time, domain, traffic)
      })

//    logData.assignTimestampsAndWatermarks(WatermarkStrategy
//      .forBoundedOutOfOrderness[(Long, String, Long)](Duration.ofSeconds(10))
//      .withTimestampAssigner(new SerializableTimestampAssigner[(Long, String, Long)] {
//        override def extractTimestamp(element: (Long, String, Long), recordTimestamp: Long): Long = {
//          element._1
//        }
//      })
//    ).keyBy(1)
//      .window(TumblingEventTimeWindows.of(Time.seconds(60)))
//      .apply(new WindowFunction[(Long,String,Long),(String,String,Long),Tuple,TimeWindow] {
//        override def apply(key: Tuple, window: TimeWindow, input: Iterable[(Long, String, Long)], out: Collector[(String, String, Long)]): Unit = {
//          val domain = key.getField(0).toString
//          var sum = 0L
//          val iterator = input.iterator
//          var time = ""
//          while (iterator.hasNext){
//            val next = iterator.next()
//            sum += next._3
//            time = new SimpleDateFormat("yyyy-MM-dd HH:mm").format(new Date(next._1))
//          }
//
//          out.collect((time,domain,sum))
//        }
//      })

    // Dimension stream: domain -> userId snapshots periodically emitted by
    // a custom MySQL source.
    val mysqlData = senv.addSource(new CustomMysqlSource)
    //mysqlData.print().setParallelism(1)

    val connectData = logData.connect(mysqlData)
      .flatMap(new CoFlatMapFunction[(Long,String,Long), mutable.HashMap[String,String], String] {

        // Latest domain -> userId snapshot received from the MySQL stream.
        // NOTE(review): plain operator field, not checkpointed state; log
        // records arriving before the first snapshot see an empty map and
        // get an empty userId — confirm this is acceptable.
        var userDomainMap: mutable.HashMap[String, String] = mutable.HashMap[String, String]()

        // Log-stream side: enrich each record with the owning user id.
        override def flatMap1(value: (Long, String, Long), out: Collector[String]): Unit = {
          val domain = value._2
          val userId = userDomainMap.getOrElse(domain, "")

          // Debug output; remove or switch to logger.debug in production.
          println("~~~~~" + userId)

          out.collect(value._1 + "\t" + value._2 + "\t" + value._3 + "\t" + userId)

        }

        // MySQL-stream side: replace the cached mapping with the new snapshot.
        override def flatMap2(value: mutable.HashMap[String, String], out: Collector[String]): Unit = {
          userDomainMap = value
        }
      })

    connectData.print()

    senv.execute("LogAnalysisScala")
  }
}
