package com.atguigu.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.app.func.DimAsyncFunction;
import com.atguigu.bean.SubjectOrderBean;
import com.atguigu.utils.DateFormatUtil;
import com.atguigu.utils.MyClickHouseUtil;
import com.atguigu.utils.MyKafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.time.Duration;
import java.util.HashSet;
import java.util.concurrent.TimeUnit;

/**
 * DWS job: per-subject order window aggregation.
 *
 * <p>Reads the DWD order-detail topic from Kafka, de-duplicates the left-join
 * fan-out per order-detail id, de-duplicates users per UID, enriches each record
 * with the subject id (via the course dimension) and the subject name (via the
 * subject dimension), aggregates order count / user count / order amount in
 * 10-second event-time tumbling windows, and writes the result to ClickHouse.
 */
public class Dws131_SubjectOrderWindow {
    public static void main(String[] args) throws Exception {

        //1.Build the execution environment.
        // NOTE(review): parallelism 1 matches the dev/tutorial setup; in production
        // this should track the Kafka partition count.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        //2.Read the DWD order-detail topic from Kafka.
        // Watermarks are assigned later (step 7), after de-duplication.
        DataStreamSource<String> kafkaDS = env.fromSource(MyKafkaUtil.getKafkaSource("dwd_trade_order_detail", "subject_order"), WatermarkStrategy.noWatermarks(), "kafka-source");

        //3.Drop null records (left-join retractions arrive as nulls) and parse to JSON.
        SingleOutputStreamOperator<JSONObject> jsonObjDS = kafkaDS.flatMap(new FlatMapFunction<String, JSONObject>() {
            @Override
            public void flatMap(String value, Collector<JSONObject> out) throws Exception {
                if (value != null) {
                    out.collect(JSON.parseObject(value));
                }
            }
        });

        //4.Key by order-detail id and keep only the first record per id, discarding
        // duplicates produced by the upstream left join; convert to the JavaBean.
        SingleOutputStreamOperator<SubjectOrderBean> subjectOrderDS = jsonObjDS
                .keyBy(json -> json.getString("id"))
                .flatMap(new RichFlatMapFunction<JSONObject, SubjectOrderBean>() {

                    // Marker state: non-null means this detail id was already emitted.
                    private ValueState<String> valueState;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        // 5s TTL is enough to cover the left-join duplicate window
                        // while keeping state small.
                        StateTtlConfig ttlConfig = new StateTtlConfig.Builder(Time.seconds(5))
                                .setUpdateType(StateTtlConfig.UpdateType.OnReadAndWrite)
                                .build();
                        ValueStateDescriptor<String> stateDescriptor = new ValueStateDescriptor<>("value-state", String.class);
                        stateDescriptor.enableTimeToLive(ttlConfig);

                        valueState = getRuntimeContext().getState(stateDescriptor);
                    }

                    @Override
                    public void flatMap(JSONObject value, Collector<SubjectOrderBean> out) throws Exception {

                        // First record for this detail id wins; later duplicates are dropped.
                        String state = valueState.value();

                        if (state == null) {
                            valueState.update("1");

                            HashSet<String> orderIds = new HashSet<>();
                            orderIds.add(value.getString("order_id"));

                            // stt/edt are filled by the window function (step 9);
                            // subjectId/subjectName are filled by the dim joins (steps 8/10).
                            out.collect(new SubjectOrderBean(
                                    "",
                                    "",
                                    value.getString("course_id"),
                                    "",
                                    "",
                                    orderIds,
                                    value.getString("user_id"),
                                    0L,
                                    0L,
                                    value.getBigDecimal("origin_amount"),
                                    value.getLong("create_time")));
                        }
                    }
                });

        //5.Key by user id for per-user de-duplication.
        KeyedStream<SubjectOrderBean, String> keyedByUidDS = subjectOrderDS.keyBy(SubjectOrderBean::getUserID);

        //6.Count each user at most once per TTL period: the first record for a UID
        // carries userCount=1, subsequent records keep the default 0.
        SingleOutputStreamOperator<SubjectOrderBean> subjectOrderByUidDS = keyedByUidDS.map(new RichMapFunction<SubjectOrderBean, SubjectOrderBean>() {

            // Marker state: non-null means this UID was already counted.
            private ValueState<String> valueState;

            @Override
            public void open(Configuration parameters) throws Exception {
                ValueStateDescriptor<String> stateDescriptor = new ValueStateDescriptor<>("uid-state", String.class);

                StateTtlConfig ttlConfig = new StateTtlConfig
                        .Builder(Time.seconds(10))
                        .setUpdateType(StateTtlConfig.UpdateType.OnReadAndWrite)
                        .build();
                stateDescriptor.enableTimeToLive(ttlConfig);

                valueState = getRuntimeContext().getState(stateDescriptor);
            }

            @Override
            public SubjectOrderBean map(SubjectOrderBean value) throws Exception {

                String state = valueState.value();

                if (state == null) {
                    value.setSubjectOrderUserCount(1L);
                    valueState.update("1");
                }

                return value;
            }
        });
        subjectOrderByUidDS.print("subjectOrderByUidDS-->");

        //7.Extract the event timestamp (order create_time) and generate watermarks
        // with a 2s out-of-orderness bound.
        SingleOutputStreamOperator<SubjectOrderBean> subjectOrderWithWMDS = subjectOrderByUidDS.assignTimestampsAndWatermarks(WatermarkStrategy.<SubjectOrderBean>forBoundedOutOfOrderness(Duration.ofSeconds(2)).withTimestampAssigner(new SerializableTimestampAssigner<SubjectOrderBean>() {
            @Override
            public long extractTimestamp(SubjectOrderBean element, long recordTimestamp) {
                return element.getTs();
            }
        }));

        //8.Async dim join: look up the course dimension to fill in the subject id.
        SingleOutputStreamOperator<SubjectOrderBean> subjectIDDS = AsyncDataStream.unorderedWait(subjectOrderWithWMDS, new DimAsyncFunction<SubjectOrderBean>("DIM_COURSE_INFO") {

            @Override
            public String getkey(SubjectOrderBean input) throws Exception {
                return input.getCourseId();
            }

            @Override
            public void join(SubjectOrderBean input, JSONObject dimInfo) throws Exception {
                input.setSubjectId(dimInfo.getString("SUBJECT_ID"));
            }
        }, 60, TimeUnit.SECONDS);

        //9.Key by subject id, open a 10s event-time tumbling window and aggregate:
        // the reduce merges order-id sets, user counts and amounts; the window
        // function derives the distinct order count and stamps window bounds.
        SingleOutputStreamOperator<SubjectOrderBean> reduceDS = subjectIDDS.keyBy(SubjectOrderBean::getSubjectId)
                .window(TumblingEventTimeWindows.of(org.apache.flink.streaming.api.windowing.time.Time.seconds(10)))
                .reduce(new ReduceFunction<SubjectOrderBean>() {
                    @Override
                    public SubjectOrderBean reduce(SubjectOrderBean value1, SubjectOrderBean value2) throws Exception {
                        value1.getOrderIds().addAll(value2.getOrderIds());
                        value1.setSubjectOrderUserCount(value1.getSubjectOrderUserCount() + value2.getSubjectOrderUserCount());
                        value1.setSubjectOrderAmount(value1.getSubjectOrderAmount().add(value2.getSubjectOrderAmount()));

                        return value1;
                    }
                }, new WindowFunction<SubjectOrderBean, SubjectOrderBean, String, TimeWindow>() {
                    @Override
                    public void apply(String s, TimeWindow window, Iterable<SubjectOrderBean> input, Collector<SubjectOrderBean> out) throws Exception {

                        // Reduce guarantees exactly one pre-aggregated element per window.
                        SubjectOrderBean next = input.iterator().next();

                        // Distinct order count comes from the merged order-id set.
                        next.setSubjectOrderCount((long) next.getOrderIds().size());
                        // ts is repurposed as the processing/version timestamp for the sink.
                        next.setTs(System.currentTimeMillis());
                        next.setEdt(DateFormatUtil.toYmdHms(window.getEnd()));
                        next.setStt(DateFormatUtil.toYmdHms(window.getStart()));

                        out.collect(next);
                    }
                });
        reduceDS.print("reduceDS-->");

        //10.Async dim join: look up the subject dimension to fill in the subject name.
        SingleOutputStreamOperator<SubjectOrderBean> resultDS = AsyncDataStream.unorderedWait(reduceDS, new DimAsyncFunction<SubjectOrderBean>("DIM_BASE_SUBJECT_INFO") {

            @Override
            public String getkey(SubjectOrderBean input) throws Exception {
                return input.getSubjectId();
            }

            @Override
            public void join(SubjectOrderBean input, JSONObject dimInfo) throws Exception {
                // BUGFIX: was setSubjectId(...), which overwrote the subject id with
                // the subject name and left subjectName empty in the sink.
                input.setSubjectName(dimInfo.getString("SUBJECT_NAME"));
            }
        }, 60, TimeUnit.SECONDS);

        //11.Write the aggregated rows to ClickHouse.
        resultDS.print(">>>>>>>>>");
        resultDS.addSink(MyClickHouseUtil.getSinkFunction("insert into dws_subject_order_window values(?,?,?,?,?,?,?,?)"));

        //12.Submit the job.
        env.execute("Dws131_SubjectOrderWindow");
    }
}
