package com.zhu.app;


import com.alibaba.fastjson.JSONObject;
import com.zhu.bean.EvaluationWindow;
import com.zhu.util.ClickHouseUtil;
import com.zhu.util.ClusterKafkaUtil;
import com.zhu.util.DataFormatUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import java.time.Duration;

/**
 * 好评&差评统计窗口
 */
/**
 * Positive/negative evaluation statistics window.
 *
 * <p>Pipeline: Kafka ({@code topic_db}, Maxwell CDC JSON) → parse &amp; filter inserts
 * → map to {@link EvaluationWindow} beans → event-time watermarks (2s out-of-orderness)
 * → key by emotion → 10s tumbling event-time window → reduce (sum counts)
 * + window metadata stamping → ClickHouse sink.
 */
public class EvaluationWindowApp {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment streamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment();
        streamExecutionEnvironment.setParallelism(4);

        // Checkpointing — intentionally disabled for local runs; re-enable for production.
        /*
        streamExecutionEnvironment.enableCheckpointing(5 * 60000L, CheckpointingMode.EXACTLY_ONCE); // exactly-once
        streamExecutionEnvironment.setStateBackend(new HashMapStateBackend());
        streamExecutionEnvironment.getCheckpointConfig().setCheckpointStorage(ClusterParametersConfig.HDFS_CHECKPOINT_FILE_DIR);  // checkpoints stored on HDFS
        System.setProperty("HADOOP_USER_NAME", "zhu");
        streamExecutionEnvironment.getCheckpointConfig().setCheckpointTimeout(10 * 60000L);  // timeout
        streamExecutionEnvironment.getCheckpointConfig().setMaxConcurrentCheckpoints(2);  // max concurrent checkpoints
        streamExecutionEnvironment.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 5 * 1000L));
         */

        // Source: Maxwell CDC records from Kafka.
        String topic = "topic_db";
        String groupId = "dws_evaluation_window";
        DataStreamSource<String> kafkaDStream = streamExecutionEnvironment.addSource(ClusterKafkaUtil.getFlinkKafkaConsumer(topic, groupId));

        kafkaDStream.print(">>>>");

        // todo json filter
        /*
        maxwell vs FlinkCDC? Dynamic reads suit a config stream; Maxwell is used here.
        Sample record:
        {"database":"flink","table":"spu_info","type":"insert","ts":1682429037,
        "xid":784,"commit":true,"data":{"id":13,"spu_name":"zhu","description":"zhu","category3_id":322,"tm_id":3}}
        Filtered on: database + type == insert
         */
        // Parse each record, keep only inserts from the "flink" database, and emit the
        // nested "data" payload. Malformed JSON is logged and skipped (best-effort —
        // a single bad record must not fail the job).
        // NOTE(review): no "table" filter here, so inserts from EVERY table in the
        // "flink" database flow downstream — confirm whether a specific evaluation
        // table should be matched as well.
        SingleOutputStreamOperator<JSONObject> jsonObjDStream = kafkaDStream.flatMap(new FlatMapFunction<String, JSONObject>() {
            @Override
            public void flatMap(String data, Collector<JSONObject> collector) throws Exception {
                try {
                    JSONObject jsonObject = JSONObject.parseObject(data);
                    if (jsonObject != null) {
                        if ("flink".equals(jsonObject.getString("database")) && "insert".equals(jsonObject.getString("type"))) {
                            JSONObject tableData = jsonObject.getJSONObject("data");
                            collector.collect(tableData);
                        }
                    }
                } catch (Exception e) {
                    // Deliberate best-effort: drop the bad record, keep the stream alive.
                    e.printStackTrace();
                }
            }
        });

        // BUGFIX: previously printed the raw kafkaDStream again under the ">>>json"
        // label; the parsed/filtered stream is what this tap is meant to show.
        jsonObjDStream.print(">>>json");

        // todo bean
        // Each record becomes a unit-count bean; stt/edt are filled in later by the
        // window function, ts is the processing-time stamp.
        SingleOutputStreamOperator<EvaluationWindow> evaluationBeanDStream = jsonObjDStream.map(
                new MapFunction<JSONObject, EvaluationWindow>() {
                    @Override
                    public EvaluationWindow map(JSONObject jsonObject) throws Exception {
                        String emotion = jsonObject.getString("emotion");
                        String createTime = jsonObject.getString("create_time");
                        return new EvaluationWindow(null, null, 1L, createTime, emotion, System.currentTimeMillis());
                    }
                }
        );

        evaluationBeanDStream.print("bean>>>>>");

        // Event time comes from the record's create_time; tolerate up to 2s of
        // out-of-order arrival before the watermark advances past an event.
        SingleOutputStreamOperator<EvaluationWindow> evaluationWithWaterMarkDStream = evaluationBeanDStream.assignTimestampsAndWatermarks(WatermarkStrategy.<EvaluationWindow>forBoundedOutOfOrderness(Duration.ofSeconds(2))
                .withTimestampAssigner(new SerializableTimestampAssigner<EvaluationWindow>() {
                    @Override
                    public long extractTimestamp(EvaluationWindow evaluationWindow, long l) {
                        return DataFormatUtil.toTs(evaluationWindow.getCreateTime(), true);
                    }
                }));

        // Key by emotion (e.g. positive/negative) and aggregate in 10s tumbling event-time windows.
        KeyedStream<EvaluationWindow, String> evaluationWindowStringKeyedStream = evaluationWithWaterMarkDStream.keyBy(EvaluationWindow::getEmotion);
        WindowedStream<EvaluationWindow, String, TimeWindow> windowEvaluationDSteam = evaluationWindowStringKeyedStream.window(TumblingEventTimeWindows.of(Time.seconds(10)));

        // todo reduce
        // Incremental reduce sums counts as elements arrive; the ProcessWindowFunction
        // then receives the single pre-aggregated bean and stamps window start/end.
        SingleOutputStreamOperator<EvaluationWindow> resultDStream = windowEvaluationDSteam.reduce(
                new ReduceFunction<EvaluationWindow>() {
                    @Override
                    public EvaluationWindow reduce(EvaluationWindow evaluationWindow1, EvaluationWindow evaluationWindow2) throws Exception {
                        evaluationWindow1.setCount(evaluationWindow1.getCount() + evaluationWindow2.getCount());
                        return evaluationWindow1;
                    }
                }
                ,
                new ProcessWindowFunction<EvaluationWindow, EvaluationWindow, String, TimeWindow>() {
                    @Override
                    public void process(String key, ProcessWindowFunction<EvaluationWindow, EvaluationWindow, String, TimeWindow>.Context context, Iterable<EvaluationWindow> iterable, Collector<EvaluationWindow> collector) throws Exception {
                        // With a reduce pre-aggregation the iterable holds exactly one element.
                        EvaluationWindow resultEvaluationWindowCount = iterable.iterator().next();
                        String start = DataFormatUtil.toYmdHms(context.window().getStart());
                        String end = DataFormatUtil.toYmdHms(context.window().getEnd());

                        resultEvaluationWindowCount.setTs(System.currentTimeMillis());
                        resultEvaluationWindowCount.setStt(start);
                        resultEvaluationWindowCount.setEdt(end);
                        collector.collect(resultEvaluationWindowCount);
                    }
                }
        );

        resultDStream.print("window >>>");

        // todo ClickHouse
        // Positional '?' binding order must match the EvaluationWindow field order
        // expected by ClickHouseUtil.
        resultDStream.addSink(ClickHouseUtil.getClickHouseSinkFunction(
                "insert into dws_interaction_evaluation_window values(?,?,?,?,?)"
                ));

        streamExecutionEnvironment.execute("EvaluationWindowApp");

    }

}