package com.yeming.flink.mudlog.handle.mydata

import java.util.{Date, Properties}

import com.alibaba.fastjson.{JSON, JSONObject}
import com.google.gson.{Gson, GsonBuilder}
import com.yeming.flink.mudlog.LoggingBean
import com.yeming.flink.mudlog.utils.Constants
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer
import org.apache.kafka.common.serialization.StringDeserializer

/**
 * 1. Cleans (ETL) the raw log stream; results can be pushed to ES for visualization.
 * 2. The stream can also be forked to HDFS for backup / later data mining and analysis.
 *
 * This job counts how many LoggingBean records arrive per one-second
 * event-time window and prints the counts.
 */
object CountPerSecondsHandle {

  // Shared Gson instance used to deserialize the embedded JSON payload into a LoggingBean.
  private val gson: Gson = new GsonBuilder().create()

  /**
   * Entry point: builds and executes the Flink streaming pipeline.
   *
   * Pipeline: Kafka topic "mudlog" -> filter lines carrying the log-type marker
   * -> parse each line into a LoggingBean (malformed records are dropped)
   * -> assign ascending event-time timestamps from the record's "@timestamp"
   * -> count records per one-second window -> print the per-window count.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {

    // Initialise the streaming environment.
    val streamEnv: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    // Single parallel task; timestamps come from the data itself (event time).
    streamEnv.setParallelism(1)
    streamEnv.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)

    // Kafka consumer configuration.
    val props = new Properties()
    props.setProperty("bootstrap.servers", "f2:9092,f3:9092,f4:9092")
    props.setProperty("group.id", "flink_5")
    props.setProperty("key.deserializer", classOf[StringDeserializer].getName)
    props.setProperty("value.deserializer", classOf[StringDeserializer].getName)
    props.setProperty("auto.offset.reset", "earliest")

    val stream: DataStream[String] =
      streamEnv.addSource(new FlinkKafkaConsumer[String]("mudlog", new SimpleStringSchema, props))

    // ETL: keep only lines that carry the log-type marker, parse them, then
    // count records per one-second event-time window.
    // NOTE: flatMap (not map) so that a single malformed record is skipped
    // instead of throwing and failing the whole streaming job.
    val result = stream
      .filter(_.contains(Constants.LOG_TYPE_PREFIX))
      .flatMap(msg => parseLoggingBean(msg))
      .assignAscendingTimestamps(_.logTime)
      .map(log => (log, 1))
      .timeWindowAll(Time.seconds(1))
      .sum(1)
      .map(_._2)

    result.print()

    // Launch the streaming computation.
    streamEnv.execute("ViewCompute")
  }

  /**
   * Parses one raw Kafka record into a LoggingBean.
   *
   * The record is a JSON envelope whose "message" field holds the raw log line;
   * the substring after Constants.LOG_TYPE_PREFIX in that line is the bean's
   * JSON payload. The envelope's "@timestamp" is copied onto the bean's logTime
   * so it can drive event-time windowing.
   *
   * @param msg raw JSON record consumed from Kafka
   * @return Some(bean) on success; None when the record is malformed
   *         (unparsable JSON, missing fields, or no payload after the prefix —
   *         the original code threw ArrayIndexOutOfBoundsException in that case)
   */
  private def parseLoggingBean(msg: String): Option[LoggingBean] = {
    import scala.util.Try
    Try {
      val envelope: JSONObject = JSON.parseObject(msg)
      val body: String = envelope.getString("message")
      val date: Date = envelope.getDate("@timestamp")
      // split(...)(1) throws if the line ends exactly with the prefix; the
      // surrounding Try converts that failure into None.
      val payload: String = body.split(Constants.LOG_TYPE_PREFIX)(1)
      val bean: LoggingBean = gson.fromJson(payload, classOf[LoggingBean])
      bean.logTime = date.getTime
      bean
    }.toOption
  }
}
