package com.xinqing.test.flink.datastream;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.feidee.fdcommon.util.MD5Util;
import com.xinqing.test.flink.datastream.model.Result;
import com.xinqing.test.flink.datastream.model.Student;
import com.xinqing.test.flink.datastream.selector.MySelector;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.jdbc.JdbcConnectionOptions;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;

import java.time.Duration;
import java.util.Properties;

/**
 * @Author:CHQ
 * @Date:2021/4/20 14:33
 * @Description
 */
/**
 * Flink streaming job: consumes JSON student records from Kafka, aggregates
 * per-student scores in 30-second event-time tumbling windows, and upserts
 * the results into an external table via JDBC.
 *
 * <p>Pipeline: Kafka source → JSON map → watermarks → keyBy(MySelector)
 * → 30s tumbling event-time window → per-element Result emission → JDBC sink.
 */
public class Kafka2Kudu {
    public static void main(String[] args) throws Exception {
        // Event-time semantics with data from Kafka.
        // Checkpointing every 5s with exactly-once guarantees; 60s checkpoint timeout.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(60000);

        // Kafka consumer configuration: brokers, consumer group, String
        // (de)serializers, start from latest offsets when no committed offset exists.
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "10.201.7.175:9092");
        properties.setProperty("group.id", "chq1");
        properties.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        properties.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        properties.setProperty("auto.offset.reset", "latest");

        // Source/transform parallelism is 4.
        // Steps: deserialize JSON → assign event-time watermarks → key by
        // MySelector (MD5 of sid becomes the sink-side primary key) → window → emit Result.
        SingleOutputStreamOperator<Result> source = env.addSource(new FlinkKafkaConsumer<>("student", new SimpleStringSchema(), properties))
                .setParallelism(4)
                .map((MapFunction<String, Student>) value -> {
                    JSONObject jsonObject = JSON.parseObject(value);
                    return new Student(
                            jsonObject.getString("sid"),
                            jsonObject.getIntValue("chineseScore"),
                            jsonObject.getIntValue("mathScore"),
                            jsonObject.getLong("timestamp"));
                })
                // FIX: the original strategy had no timestamp assigner, so event-time
                // windows could never fire. Extract the event time from Student;
                // tolerate up to 1s of out-of-orderness.
                .assignTimestampsAndWatermarks(
                        WatermarkStrategy.<Student>forBoundedOutOfOrderness(Duration.ofMillis(1000))
                                .withTimestampAssigner((student, recordTimestamp) -> student.getTimestamp()))
                .keyBy(new MySelector())
                .window(TumblingEventTimeWindows.of(Time.seconds(30)))
                .process(new ProcessWindowFunction<Student, Result, String, TimeWindow>() {
                    @Override
                    public void process(String key, Context context, Iterable<Student> elements, Collector<Result> out) throws Exception {
                        for (Student element : elements) {
                            // FIX: the original constructed the Result but never
                            // emitted it, so the downstream print/sink saw no data.
                            out.collect(new Result(
                                    MD5Util.string2MD5(key),
                                    element.getChineseScore() + element.getMathScore(),
                                    element.getTimestamp()));
                        }
                    }
                });

        source.print("----------------------------->");

        // Sink row format: MD5(sid), sum(score), timestamp.
        // FIX: the original upsert clause was "UPDATE sid = ?" (the key set to
        // itself), so existing rows were never actually refreshed; update the
        // value columns instead.
        // NOTE(review): "ON DUPLICATE KEY UPDATE" is MySQL syntax — confirm the
        // Hive/Impala endpoint configured below actually accepts it.
        source.addSink(JdbcSink.sink(
                "insert into dw.student(sid,sumScore,`timestamp`) values(?,?,?) "
                        + "ON DUPLICATE KEY UPDATE sumScore = ?, `timestamp` = ?",
                (ps, data) -> {
                    ps.setString(1, data.getKey());
                    ps.setInt(2, data.getSumScore());
                    ps.setLong(3, data.getTimestamp());
                    ps.setInt(4, data.getSumScore());
                    ps.setLong(5, data.getTimestamp());
                },
                new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
                        .withDriverName("org.apache.hive.jdbc.HiveDriver")
                        .withUrl("jdbc:hive2://10.201.7.126:21050/default;auth=noSasl")
                        .build()
        )).setParallelism(1); // sink parallelism is 1

        // FIX: exceptions are no longer swallowed via System.out.println — let
        // them propagate so the job launcher sees the failure and full stack trace.
        env.execute();
    }
}

