package com.atguigu.gmall.app.dws.transaction;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.app.func.DimAsyncFunction;
import com.atguigu.gmall.bean.TradeCourseSubjectCategoryUserOrderBean;
import com.atguigu.gmall.utils.ClickHouseUtil;
import com.atguigu.gmall.utils.DateFormatUtil;
import com.atguigu.gmall.utils.KafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;

import java.time.Duration;
import java.util.Collections;
import java.util.HashSet;
import java.util.concurrent.TimeUnit;

/**
 * DWS-layer Flink job: aggregates trade order-detail records from Kafka into
 * 10-second tumbling event-time windows keyed by (courseId, userId), enriches the
 * aggregates with course / subject / category dimension attributes via async lookup,
 * and writes the result rows to ClickHouse.
 *
 * NOTE(review): the class name says "TrademarkCategoryUserRefund" but every topic,
 * group id, bean, and table below is about course/subject/category user ORDER
 * metrics — the class name looks like a copy-paste leftover; confirm and rename
 * (renaming is not done here to keep the external entry point stable).
 */
public class DwsTradeTrademarkCategoryUserRefundWindow {
    public static void main(String[] args) throws Exception {

        // TODO 1. Environment setup
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // TODO 2. Checkpointing / state backend (disabled for local development)
        /*
        env.enableCheckpointing(5 * 60 * 1000L, CheckpointingMode.EXACTLY_ONCE );
        env.getCheckpointConfig().setCheckpointTimeout( 3 * 60 * 1000L );
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop100:8020/edu");
        System.setProperty("HADOOP_USER_NAME", "atguigu");
         */

        // TODO 3. Read order-detail records from the Kafka dwd_trade_order_detail topic
        String topic = "dwd_trade_order_detail";
        String groupId = "dws_trade_course_subject_category_user_order_window";
        FlinkKafkaConsumer<String> kafkaConsumer = KafkaUtil.getFlinkConsumer(topic, groupId);
        DataStreamSource<String> source = env.addSource(kafkaConsumer);

        // TODO 4. Parse JSON and map each record to the aggregation bean.
        // Each input row becomes a unit bean: one order id in the set, one order,
        // one person, its final_amount, and the event timestamp (seconds -> millis).
        SingleOutputStreamOperator<JSONObject> mappedStream = source.map(JSON::parseObject);
        SingleOutputStreamOperator<TradeCourseSubjectCategoryUserOrderBean> javaBeanStream = mappedStream.map(
                jsonObj -> {
                    String orderId = jsonObj.getString("order_id");
                    String userId = jsonObj.getString("user_id");
                    Long ts = jsonObj.getLong("ts") * 1000L;
                    return TradeCourseSubjectCategoryUserOrderBean.builder()
                            .orderIdSet(new HashSet<String>(
                                    Collections.singleton(orderId)
                            ))
                            .userId(userId)
                            .orderCount(1L)
                            .peopleCount(1L)
                            .orderAmount(jsonObj.getDouble("final_amount"))
                            .ts(ts)
                            .build();
                }
        );

        // TODO 5. Assign event-time watermarks (2 s bounded out-of-orderness)
        SingleOutputStreamOperator<TradeCourseSubjectCategoryUserOrderBean> withWatermarkDS = javaBeanStream.assignTimestampsAndWatermarks(
                WatermarkStrategy
                        .<TradeCourseSubjectCategoryUserOrderBean>forBoundedOutOfOrderness(Duration.ofSeconds(2))
                        .withTimestampAssigner(
                                new SerializableTimestampAssigner<TradeCourseSubjectCategoryUserOrderBean>() {
                                    @Override
                                    public long extractTimestamp(TradeCourseSubjectCategoryUserOrderBean javaBean, long recordTimestamp) {
                                        return javaBean.getTs();
                                    }
                                }
                        )
        );

        // TODO 7. Key by courseId + userId.
        // NOTE(review): plain string concatenation of the two ids can collide
        // (e.g. "1"+"23" vs "12"+"3"); a delimiter would be safer — confirm id formats.
        KeyedStream<TradeCourseSubjectCategoryUserOrderBean, String> keyedForAggregateStream = withWatermarkDS.keyBy(
                new KeySelector<TradeCourseSubjectCategoryUserOrderBean, String>() {
                    @Override
                    public String getKey(TradeCourseSubjectCategoryUserOrderBean javaBean) throws Exception {
                        return javaBean.getCourseId() +
                                javaBean.getUserId();
                    }
                }
        );

        // TODO 8. 10-second tumbling event-time windows
        WindowedStream<TradeCourseSubjectCategoryUserOrderBean, String, TimeWindow> windowDS = keyedForAggregateStream.window(TumblingEventTimeWindows.of(
                org.apache.flink.streaming.api.windowing.time.Time.seconds(10L)));

        // TODO 9. Aggregate within each window.
        // The reduce merges order-id sets and sums amounts; orderCount is NOT summed
        // here because the window function below recomputes it from the distinct
        // order-id set. peopleCount is summed per record within a per-user key, so it
        // ends up equal to the record count — presumably it is deduplicated/summed
        // downstream; verify against consumers.
        SingleOutputStreamOperator<TradeCourseSubjectCategoryUserOrderBean> reducedStream = windowDS.reduce(
                new ReduceFunction<TradeCourseSubjectCategoryUserOrderBean>() {
                    @Override
                    public TradeCourseSubjectCategoryUserOrderBean reduce(TradeCourseSubjectCategoryUserOrderBean value1, TradeCourseSubjectCategoryUserOrderBean value2) throws Exception {
                        value1.getOrderIdSet().addAll(value2.getOrderIdSet());
                        value1.setPeopleCount(value1.getPeopleCount() + value2.getPeopleCount());
                        value1.setOrderAmount(value1.getOrderAmount() + value2.getOrderAmount());
                        return value1;
                    }
                },
                new ProcessWindowFunction<TradeCourseSubjectCategoryUserOrderBean, TradeCourseSubjectCategoryUserOrderBean, String, TimeWindow>() {
                    @Override
                    public void process(String key, Context context, Iterable<TradeCourseSubjectCategoryUserOrderBean> elements, Collector<TradeCourseSubjectCategoryUserOrderBean> out) throws Exception {
                        // Stamp window bounds, derive the distinct order count from the
                        // merged id set, and refresh ts to processing time for the sink.
                        String stt = DateFormatUtil.toYmdHms(context.window().getStart());
                        String edt = DateFormatUtil.toYmdHms(context.window().getEnd());
                        for (TradeCourseSubjectCategoryUserOrderBean element : elements) {
                            element.setStt(stt);
                            element.setEdt(edt);
                            element.setOrderCount((long) (element.getOrderIdSet().size()));
                            element.setTs(System.currentTimeMillis());
                            out.collect(element);
                        }
                    }
                }
        );

        // TODO 10. Async dimension enrichment (fields not part of the grouping key)
        // 10.1 Course info (dim_course_info): courseName, subjectId
        SingleOutputStreamOperator<TradeCourseSubjectCategoryUserOrderBean> withCourseStream = AsyncDataStream.unorderedWait(
                reducedStream,
                new DimAsyncFunction<TradeCourseSubjectCategoryUserOrderBean>("dim_course_info".toUpperCase()) {
                    @Override
                    public void join(TradeCourseSubjectCategoryUserOrderBean javaBean, JSONObject jsonObj) {
                        javaBean.setCourseName(jsonObj.getString("name"));
                        javaBean.setSubjectId(jsonObj.getString("subjectId"));
                    }

                    @Override
                    public String getKey(TradeCourseSubjectCategoryUserOrderBean javaBean) {
                        return javaBean.getCourseId();
                    }
                },
                100, TimeUnit.SECONDS
        );

        // 10.2 Subject info (dim_base_subject_info): subjectName, categoryId
        SingleOutputStreamOperator<TradeCourseSubjectCategoryUserOrderBean> withSubjectStream = AsyncDataStream.unorderedWait(
                withCourseStream,
                new DimAsyncFunction<TradeCourseSubjectCategoryUserOrderBean>("dim_base_subject_info".toUpperCase()) {
                    @Override
                    public void join(TradeCourseSubjectCategoryUserOrderBean javaBean, JSONObject jsonObj) {
                        javaBean.setSubjectName(jsonObj.getString("name"));
                        javaBean.setCategoryId(jsonObj.getString("categoryId"));
                    }

                    @Override
                    public String getKey(TradeCourseSubjectCategoryUserOrderBean javaBean) {
                        return javaBean.getSubjectId();
                    }
                },
                100, TimeUnit.SECONDS
        );

        // 10.3 Category info (dim_base_category_info): categoryName
        SingleOutputStreamOperator<TradeCourseSubjectCategoryUserOrderBean> withCategoryStream = AsyncDataStream.unorderedWait(
                withSubjectStream,
                new DimAsyncFunction<TradeCourseSubjectCategoryUserOrderBean>("dim_base_category_info".toUpperCase()) {
                    @Override
                    public void join(TradeCourseSubjectCategoryUserOrderBean javaBean, JSONObject jsonObj) {
                        javaBean.setCategoryName(jsonObj.getString("name"));
                    }

                    @Override
                    public String getKey(TradeCourseSubjectCategoryUserOrderBean javaBean) {
                        return javaBean.getCategoryId();
                    }
                },
                100, TimeUnit.SECONDS
        );

        // TODO 11. Write the fully-enriched rows to ClickHouse.
        // BUG FIX: the original sank withSubjectStream, which skipped stage 10.3
        // entirely — categoryName was never populated in the output table. The sink
        // must consume the last enrichment stage, withCategoryStream.
        withCategoryStream.addSink(ClickHouseUtil.<TradeCourseSubjectCategoryUserOrderBean>getJdbcSink(
                "insert into dws_trade_course_subject_category_user_order_window values(?,?,?,?,?,?,?,?,?,?,?,?,?)"
        ));

        env.execute(groupId);
    }
}
