package com.smegz

import com.alibaba.fastjson.JSONObject
import org.apache.commons.lang3.time.DateUtils
import org.apache.flink.api.common.eventtime.WatermarkStrategy
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.common.typeinfo.Types
import org.apache.flink.api.java.tuple.Tuple3
import org.apache.flink.connector.jdbc.JdbcConnectionOptions
import org.apache.flink.connector.jdbc.JdbcSink
import org.apache.flink.streaming.api.CheckpointingMode
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment
import org.apache.flink.streaming.api.functions.windowing.AllWindowFunction
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.api.windowing.triggers.ContinuousEventTimeTrigger
import org.apache.flink.streaming.api.windowing.windows.TimeWindow
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer
import org.apache.flink.util.OutputTag
import java.time.Duration
import java.util.*

/**
 * @author jje
 * @date  2020/10/11 19:03
 *
 */
class FlinkTest01 {

    companion object {
        @JvmStatic
        fun main(args: Array<String>) {
            FlinkTest01().day01()
        }
    }

    /**
     * Flink streaming job: reads tab-separated query-log records from Kafka
     * (format: `<name>\t<yyyy-MM-dd HH:mm:ss>\t<costMillis>`), groups them into
     * 5-minute event-time tumbling windows (allowing 3 minutes of out-of-order
     * lateness, with early results fired every 10 seconds), and writes each
     * window's record count and 99th-percentile query cost to MySQL via JDBC.
     */
    fun day01(){
        val env = StreamExecutionEnvironment.getExecutionEnvironment()
        env.enableCheckpointing(60000, CheckpointingMode.EXACTLY_ONCE)
        // Deprecated since Flink 1.12 (event time is the default there); kept for
        // compatibility with the Flink version this file targets.
        env.streamTimeCharacteristic = TimeCharacteristic.EventTime

        // Kafka consumer properties.
        val properties = Properties()
        properties.setProperty("bootstrap.servers", "localhost:9098")
        properties.setProperty("group.id", "test")

        // Source: raw tab-separated lines from the "topic" topic.
        val topicStream =
            env.addSource(FlinkKafkaConsumer("topic", SimpleStringSchema(), properties), Types.STRING)

        // Parse each line into (name, eventTime, costMillis).
        // NOTE(review): DateUtils.parseDate throws ParseException on malformed
        // records and would fail the job; consider flatMap + try/catch if the
        // topic can contain dirty data.
        val transformedStream = topicStream.map { t ->
                val ts = t.split("\t")
                Tuple3(ts[0], DateUtils.parseDate(ts[1], "yyyy-MM-dd HH:mm:ss"), ts[2].toInt()) }
            // FIX: f1 is a java.util.Date, not a java.time.LocalDate, so
            // Types.LOCAL_DATE was the wrong TypeInformation for that field.
            .returns(Types.TUPLE(Types.STRING, Types.GENERIC(Date::class.java), Types.INT))
            // Tolerate events up to 3 minutes out of order.
            // FIX: a timestamp assigner is required so event time comes from the
            // parsed record time (f1) rather than the Kafka ingestion timestamp.
            .assignTimestampsAndWatermarks(
                WatermarkStrategy.forBoundedOutOfOrderness<Tuple3<String, Date, Int>>(Duration.ofMinutes(3))
                    .withTimestampAssigner { record, _ -> record.f1.time })
            .windowAll(TumblingEventTimeWindows.of(Time.minutes(5)))
            // Fire early (partial) window results every 10 seconds.
            .trigger(ContinuousEventTimeTrigger.of(Time.seconds(10)))
            // The generic interface must be declared explicitly for SAM conversion.
            .apply(AllWindowFunction<Tuple3<String, Date, Int>, JSONObject, TimeWindow> {
                window, records, out ->
                // Pct99 of query cost within each 5-minute window.
                // FIX: sort by the cost (f2), not the event time (f1) — sorting
                // by time made sortedList[point].f2 a meaningless value, not a
                // percentile of query cost.
                val sortedList = records.sortedBy { t -> t.f2 }
                if (sortedList.isNotEmpty()) {
                    // FIX: clamp the index defensively so it can never exceed
                    // the last valid position.
                    val point = (sortedList.size * 99 / 100).coerceAtMost(sortedList.size - 1)
                    val jsons = JSONObject()
                    jsons["start_time"] = Date(window.start)
                    jsons["end_time"] = Date(window.end)
                    jsons["query_count"] = sortedList.size
                    jsons["pct99"] = sortedList[point].f2
                    out.collect(jsons)
                }
            })
            // FIX: removed getSideOutput(OutputTag("stream_03", ...)) — the window
            // function only ever emits to the main output, so reading a
            // never-populated side output produced an always-empty stream and the
            // JDBC sink below never received any data.

        // Sink: one MySQL row per window result.
        // FIX: the statement had 5 '?' placeholders for 4 columns, which fails at
        // prepare/execute time.
        transformedStream.addSink(JdbcSink.sink("insert into query_time_status (start_time, end_time, " +
                "pct99, query_count) values (?,?,?,?)"
            , {ps, t ->
                ps.setDate(1, t.getSqlDate("start_time"))
                ps.setDate(2, t.getSqlDate("end_time"))
                ps.setInt(3, t.getIntValue("pct99"))
                ps.setLong(4, t.getLongValue("query_count"))
            }, JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
                // FIX: JDBC URLs start with "jdbc:<subprotocol>://", i.e.
                // "jdbc:mysql://...", not "mysql:jdbc://...".
                .withUrl("jdbc:mysql://localhost:3306/test_db")
                .withUsername("root")
                .withPassword("123456")
                .withDriverName("com.mysql.cj.jdbc.Driver")
                .build()))

        env.execute("job-01")
    }
}