package com.xinqing.bigdata.flink.datastream.sink;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.xinqing.bigdata.flink.datastream.model.Result;
import com.xinqing.bigdata.flink.datastream.model.Student;
import com.xinqing.bigdata.flink.datastream.selector.MySelector;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.time.Duration;
import java.util.Properties;

/**
 * Flink streaming job: consumes JSON student-score records from Kafka,
 * aggregates per-student scores in 40s event-time tumbling windows
 * (bounded-out-of-orderness watermarks), and writes the sums to MySQL.
 *
 * @author CHQ
 * @since 2021/4/23 17:16
 */
public class Kafka2KuduWithWaterMark {

    // Static so the anonymous ProcessWindowFunction / RichSinkFunction below do
    // NOT capture it in their serialized closures: slf4j's Logger is not
    // Serializable, and a captured local logger makes Flink's closure cleaner
    // fail when the job is submitted to a cluster. A static field is simply
    // re-resolved in each task-manager JVM.
    private static final Logger logger = LoggerFactory.getLogger(Kafka2KuduWithWaterMark.class);

    /**
     * Builds and executes the pipeline:
     * Kafka source -> JSON parse to {@link Student} -> watermarks -> keyBy ->
     * 40s tumbling event-time window summing chinese+math scores -> MySQL sink.
     *
     * @param args unused
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {

        //  StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Local environment with an embedded Web UI on the default fixed port.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration());
        // env.getConfig().setAutoWatermarkInterval(200); // throttle watermark emission
        // instead of emitting after every record, which would hurt throughput

        CheckpointConfig checkpointConfig = env.getCheckpointConfig();
        checkpointConfig.setCheckpointInterval(1000);    // checkpoint every 1s
        checkpointConfig.setCheckpointTimeout(60000);    // abandon a checkpoint after 60s
        checkpointConfig.setMaxConcurrentCheckpoints(1); // at most one in-flight checkpoint

        // Kafka consumer configuration.
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "10.201.7.175:9092");
        properties.setProperty("group.id", "chq9");
        properties.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        properties.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        // properties.setProperty("auto.offset.reset", "earliest");
        properties.setProperty("auto.offset.reset", "latest");

        env.addSource(new FlinkKafkaConsumer<>("student11", new SimpleStringSchema(), properties))
                // Parse each JSON record into a Student(sid, chineseScore, mathScore, timestamp).
                .map((MapFunction<String, Student>) value -> {
                    JSONObject jsonObject = JSON.parseObject(value);
                    String sid = jsonObject.getString("sid");
                    int chineseScore = jsonObject.getIntValue("chineseScore");
                    int mathScore = jsonObject.getIntValue("mathScore");
                    Long timestamp = jsonObject.getLong("timestamp");
                    return new Student(sid, chineseScore, mathScore, timestamp);
                })
                // Event-time tumbling windows require watermarks. withIdleness lets
                // downstream windows fire even when this source partition goes quiet
                // for 20s, instead of stalling the watermark forever.
                .assignTimestampsAndWatermarks(WatermarkStrategy.<Student>forBoundedOutOfOrderness(Duration.ofSeconds(1))
                        .withIdleness(Duration.ofSeconds(20))
                        .withTimestampAssigner((SerializableTimestampAssigner<Student>) (element, recordTimestamp) -> element.getTimestamp()))
                // .assignTimestampsAndWatermarks(WatermarkStrategy.forMonotonousTimestamps())
                .keyBy(new MySelector())
                .window(TumblingEventTimeWindows.of(Time.seconds(40)))
                .process(new ProcessWindowFunction<Student, Result, String, TimeWindow>() {
                    /**
                     * Sums chinese+math scores over the 40s window for one key
                     * and emits a single Result(sid, sumScore).
                     */
                    @Override
                    public void process(String key, Context context, Iterable<Student> elements, Collector<Result> out) throws Exception {
                        int sumScore = 0;
                        for (Student element : elements) {
                            logger.info("开始关窗...元素信息================>" + element.toString());
                            sumScore += element.getChineseScore() + element.getMathScore();
                        }
                        // A fired window is never empty, so next() is safe here.
                        out.collect(new Result(elements.iterator().next().getSid(), sumScore));
                    }
                })
                // NOTE(review): JdbcSink.sink (Flink 1.12) reportedly worked with
                // processing time but not event time here — root cause unknown —
                // hence the hand-rolled RichSinkFunction below.
            /*    .addSink(JdbcSink.sink(  //sink端输出格式为MD5(sid),sum(sorce),timestamp,
                        "insert into student2(sid,sumScore,`timestamp`) values(?,?,?)",
                        (ps, data) -> {
                            System.out.println("------------>" + data.toString());
                            //入库
                            ps.setString(1, data.getKey());
                            ps.setInt(2, data.getSumScore());
                            ps.setLong(3, data.getTimestamp());
                        },
                        new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
                               // .withDriverName("com.cloudera.impala.jdbc41.Driver")
                                //.withUrl("jdbc:impala://10.201.7.126:21050/default;auth=noSasl")
                                .withDriverName("com.mysql.jdbc.Driver")
                                .withUrl("jdbc:mysql://10.201.7.174:3307/qing?useUnicode=true&characterEncoding=utf8&autoReconnect=true")
                                .withUsername("PT_super_user")
                                .withPassword("Hb86be03cc_H3,,2c8ff86#326c3")
                                .build()
                )).setParallelism(1).name("my sink");*/
                .addSink(new RichSinkFunction<Result>() {
                             private Connection conn;

                             /** Opens one JDBC connection per sink subtask. */
                             @Override
                             public void open(Configuration parameters) {
                                 try {
                                     System.out.println("正在获取Mysql连接");
                                     Class.forName("com.mysql.jdbc.Driver");
                                     conn = DriverManager.getConnection("jdbc:mysql://10.201.7.174:3307/qing?useUnicode=true&characterEncoding=utf8&autoReconnect=true",
                                             "PT_super_user", "Hb86be03cc_H3,,2c8ff86#326c3");
                                 } catch (Exception e) {
                                     logger.error("获取Mysql连接失败:", e);
                                 }
                             }

                             /** Releases the JDBC connection when the task shuts down. */
                             @Override
                             public void close() {
                                 try {
                                     if (conn != null) {
                                         conn.close();
                                     }
                                 } catch (Exception e) {
                                     logger.error("关闭Mysql连接失败:", e);
                                 }
                             }

                             /** Inserts one aggregated Result row into MySQL. */
                             @Override
                             public void invoke(Result value, Context context) {
                                 try {
                                     logger.info("进入sink端的data为-------------->" + value.toString());
                                     // try-with-resources: the original leaked one
                                     // PreparedStatement per record.
                                     try (PreparedStatement ps = conn.prepareStatement(
                                             "INSERT INTO student2(sid,sumScore) VALUES(?,?)")) {
                                         ps.setString(1, value.getKey());
                                         ps.setInt(2, value.getSumScore());
                                         ps.execute();
                                     }
                                 } catch (Exception e) {
                                     logger.error("数据入库失败：", e);
                                 }
                             }
                         }
                ).setParallelism(1).name("my sink");

        env.execute();
    }
}
