package com.atguigu.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.app.func.DimAsyncFunction;
import com.atguigu.bean.TestCourseBean;

import com.atguigu.util.MyClickHouseUtil;
import com.atguigu.util.DateFormatUtil;
import com.atguigu.util.KafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.time.Duration;
import java.util.Collections;
import java.util.HashSet;
import java.util.concurrent.TimeUnit;

public class DwsTradeTrademarkCategoryUserRefundWindow1 {

    /**
     * Flink streaming job: reads exam records from the {@code dwd_test_test_exam1}
     * Kafka topic, asynchronously joins the test-paper dimension to attach a
     * course id, then aggregates per course over 10-second event-time tumbling
     * windows: number of test takers, average score, and average duration.
     * Results are currently only printed (no ClickHouse sink is wired up yet,
     * although MyClickHouseUtil is imported — TODO confirm intended sink).
     */
    public static void main(String[] args) throws Exception {
        // TODO 1 Prepare the stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // TODO 2 State backend / checkpointing (disabled for local development)
                /*
                env.enableCheckpointing(5 * 60 * 1000L, CheckpointingMode.EXACTLY_ONCE );
                env.getCheckpointConfig().setCheckpointTimeout( 3 * 60 * 1000L );
                env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
                env.setStateBackend(new HashMapStateBackend());
                env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
                System.setProperty("HADOOP_USER_NAME", "atguigu");
                 */

        // TODO 3 Read the exam fact data from Kafka
        String topic = "dwd_test_test_exam1";
        String groupId = "dws_trade_trademark_category_user_refund_window1";
        DataStreamSource<String> dbStream = env.addSource(KafkaUtil.getKafkaConsumer(topic, groupId));

        // TODO 4 Parse JSON; drop malformed records and records without a paper_id
        SingleOutputStreamOperator<JSONObject> mapStream = dbStream.flatMap(new FlatMapFunction<String, JSONObject>() {
            @Override
            public void flatMap(String s, Collector<JSONObject> collector) throws Exception {
                try {
                    JSONObject jsonObject = JSON.parseObject(s);
                    if (jsonObject.getString("paper_id") != null) {
                        collector.collect(jsonObject);
                    }
                } catch (Exception e) {
                    // Dirty data: skip un-parseable records instead of failing the job.
                    System.err.println("Discarding malformed record: " + s);
                }
            }
        });

        // Map each record into the aggregation bean; each raw record counts as
        // one test taker, and its score/duration seed the running averages.
        SingleOutputStreamOperator<TestCourseBean> javaBeanStream = mapStream.map(jsonObject -> {
            return TestCourseBean.builder()
                    .testPersonCount(1L)
                    .paperId(jsonObject.getString("paper_id"))
                    .avgScore(jsonObject.getDouble("score"))
                    .avgDuringTime(jsonObject.getDouble("duration_sec"))
                    // ts arrives in seconds; Flink event time is milliseconds.
                    .ts(jsonObject.getLong("ts") * 1000)
                    .build();
        });

        // TODO 5 Dimension join: look up the paper dimension to get the course id
        SingleOutputStreamOperator<TestCourseBean> withSkuInfoStream = AsyncDataStream.unorderedWait(
                javaBeanStream,
                new DimAsyncFunction<TestCourseBean>("dim_test_paper".toUpperCase()) {
                    @Override
                    public void join(TestCourseBean obj, JSONObject dimJsonObj) {
                        // NOTE(review): dim tables often expose upper-case column
                        // names — confirm "courseId" matches the dim schema.
                        obj.setCourseId(dimJsonObj.getString("courseId"));
                    }

                    @Override
                    public String getKey(TestCourseBean obj) {
                        return obj.getPaperId();
                    }
                }, 100L, TimeUnit.SECONDS
        );

        // TODO 6 Assign watermarks: bounded out-of-orderness of 2 seconds
        SingleOutputStreamOperator<TestCourseBean> withWaterMarkStream = withSkuInfoStream.assignTimestampsAndWatermarks(WatermarkStrategy
                .<TestCourseBean>forBoundedOutOfOrderness(Duration.ofSeconds(2L))
                .withTimestampAssigner(new SerializableTimestampAssigner<TestCourseBean>() {
                    @Override
                    public long extractTimestamp(TestCourseBean bean, long recordTimestamp) {
                        return bean.getTs();
                    }
                }));

        // TODO 7 Key by course id (may be null if the dim lookup found no row —
        // TODO confirm whether such records should be filtered out first)
        KeyedStream<TestCourseBean, String> keyedStream = withWaterMarkStream.keyBy(new KeySelector<TestCourseBean, String>() {
            @Override
            public String getKey(TestCourseBean javaBean) throws Exception {
                return javaBean.getCourseId();
            }
        });

        // TODO 8 10-second event-time tumbling windows
        WindowedStream<TestCourseBean, String, TimeWindow> windowStream = keyedStream.window(TumblingEventTimeWindows.of(Time.seconds(10L)));

        // TODO 9 Aggregate: incrementally merge partial results per window
        SingleOutputStreamOperator<TestCourseBean> reduceStream = windowStream.reduce(new ReduceFunction<TestCourseBean>() {
            @Override
            public TestCourseBean reduce(TestCourseBean acc, TestCourseBean cur) throws Exception {
                // Capture both counts BEFORE updating the accumulator so the
                // weighted averages use the correct weights. (The original code
                // bumped the count first, which skewed avgDuringTime, and its
                // avgScore formula ignored the incoming record entirely.)
                long accCount = acc.getTestPersonCount();
                long curCount = cur.getTestPersonCount();
                long total = accCount + curCount;
                acc.setAvgDuringTime(
                        (acc.getAvgDuringTime() * accCount + cur.getAvgDuringTime() * curCount) / total);
                acc.setAvgScore(
                        (acc.getAvgScore() * accCount + cur.getAvgScore() * curCount) / total);
                acc.setTestPersonCount(total);
                return acc;
            }
        }, new WindowFunction<TestCourseBean, TestCourseBean, String, TimeWindow>() {
            @Override
            public void apply(String key, TimeWindow timeWindow, Iterable<TestCourseBean> iterable, Collector<TestCourseBean> collector) throws Exception {
                // Exactly one pre-reduced element per window; stamp it with the
                // window bounds and the emission time.
                TestCourseBean result = iterable.iterator().next();
                result.setStt(DateFormatUtil.toYmdHms(timeWindow.getStart()));
                result.setEdt(DateFormatUtil.toYmdHms(timeWindow.getEnd()));
                result.setTs(System.currentTimeMillis());
                collector.collect(result);
            }
        });

        reduceStream.print("reduce>>>");

        env.execute(groupId);
    }
}
