package com.atguigu.edu.realtime.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.edu.realtime.beans.TradeProvinceOrderBean;
import com.atguigu.edu.realtime.utils.DateFormatUtil;
import com.atguigu.edu.realtime.utils.MyKafkaUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;

import java.math.BigDecimal;
import java.util.Collections;
import java.util.HashSet;
import java.util.concurrent.TimeUnit;

/**
 * @ClassName DwsTradeProvinceOrderWindow
 * @Description DWS layer: per-province order count and order amount, aggregated over 10s tumbling event-time windows
 * @Author 邢家俊
 * @Date 2023-5-8 17:43
 * @Version 1.0
 **/
public class DwsTradeProvinceOrderWindow {
    public static void main(String[] args) throws Exception {
        // 1. Prepare the stream execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Job parallelism.
        env.setParallelism(4);
        // Checkpointing (exactly-once) — intentionally left disabled for local development.
//        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
//        //1. Checkpoint timeout
//        env.getCheckpointConfig().setCheckpointTimeout(6000L);
//        //2. Minimum pause between two checkpoints
//        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(2000L);
//        //3. Retain externalized checkpoints when the job is cancelled
//        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
//        //4. Restart strategy
//        env.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.days(30),Time.seconds(3)));
//        //5. State backend
//        env.setStateBackend(new HashMapStateBackend());
//        //6. Checkpoint storage path
//        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
//        //7. HDFS user for checkpoint writes
//        System.setProperty("HADOOP_USER_NAME","atguigu");

        // 2. Read order-detail records from the dwd_trade_order_detail Kafka topic.
        String topic = "dwd_trade_order_detail";
        String groupId = "dws_trade_province_order_groupId";
        FlinkKafkaConsumer<String> kafkaConsumer = MyKafkaUtil.getKafkaConsumer(topic, groupId);
        DataStreamSource<String> streamSourceDS = env.addSource(kafkaConsumer);

        // 3. Drop null/empty messages and parse jsonStr -> JSONObject.
        SingleOutputStreamOperator<JSONObject> flatMapDS = streamSourceDS.flatMap(new FlatMapFunction<String, JSONObject>() {
            @Override
            public void flatMap(String jsonStr, Collector<JSONObject> out) throws Exception {
                if (StringUtils.isNotEmpty(jsonStr)) {
                    out.collect(JSON.parseObject(jsonStr));
                }
            }
        });
        flatMapDS.print();

        // 4. Key by the order-detail id so duplicate versions of the SAME detail row
        //    share state.
        //    FIX: this was keyBy("order_id"); with order-level keying every new detail
        //    of an order would retract a *different* detail of that order, corrupting
        //    the amounts. The original comment ("keyby -> order detail _id") confirms
        //    the detail id was the intended key.
        KeyedStream<JSONObject, String> keyedByDetailIdDS =
                flatMapDS.keyBy(jsonObj -> jsonObj.getString("id"));

        // 5. Deduplicate by retraction: when a later version of the same detail row
        //    arrives, first re-emit the previous version with a negated amount so the
        //    downstream sum cancels it, then emit the new version.
        SingleOutputStreamOperator<JSONObject> retractDS = keyedByDetailIdDS.process(new ProcessFunction<JSONObject, JSONObject>() {
            // Last version of this detail row seen on this key.
            private ValueState<JSONObject> lastValueState;

            @Override
            public void open(Configuration parameters) throws Exception {
                lastValueState = getRuntimeContext().getState(
                        new ValueStateDescriptor<>("lastValueState", JSONObject.class));
            }

            @Override
            public void processElement(JSONObject value, Context ctx, Collector<JSONObject> out) throws Exception {
                JSONObject lastValue = lastValueState.value();
                if (lastValue != null) {
                    // Emit a retraction record: same row, amount negated via "-" prefix
                    // (parsed back into a negative BigDecimal downstream).
                    String splitTotalAmount = lastValue.getString("split_total_amount");
                    lastValue.put("split_total_amount", "-" + splitTotalAmount);
                    out.collect(lastValue);
                }
                // FIX: the state was never updated in the original code, so value()
                // was always null and the retraction logic above never fired.
                lastValueState.update(value);
                out.collect(value);
            }
        });
        // Sample input record:
        //{"create_time":"2023-04-25 17:59:09","sku_num":"1","split_original_amount":"69.0000",
        // "split_coupon_amount":"0.0","sku_id":"29",
        // "date_id":"2023-04-25","user_id":"33","province_id":"11",
        // "sku_name":"CAREMiLLE珂曼奶油小方口红 雾面滋润保湿持久丝缎唇膏 M01醉蔷薇",
        // "id":"14242510","order_id":"43925","split_activity_amount":"0.0","split_total_amount":"69.0","ts":"1682503149"}

        // 6. Map each JSON record to a TradeProvinceOrderBean.
        SingleOutputStreamOperator<TradeProvinceOrderBean> beanDS = retractDS.map(new MapFunction<JSONObject, TradeProvinceOrderBean>() {
            @Override
            public TradeProvinceOrderBean map(JSONObject jsonObj) throws Exception {
                String provinceId = jsonObj.getString("province_id");
                String orderId = jsonObj.getString("order_id");
                String splitTotalAmount = jsonObj.getString("split_total_amount");
                // ts arrives in seconds; Flink event time is milliseconds.
                long ts = jsonObj.getLong("ts") * 1000;
                return TradeProvinceOrderBean.builder()
                        .provinceId(provinceId)
                        .orderAmount(new BigDecimal(splitTotalAmount))
                        .orderIdSet(new HashSet<>(Collections.singleton(orderId)))
                        .ts(ts)
                        .build();
            }
        });

        // 7. Assign event-time watermarks.
        //    NOTE(review): forMonotonousTimestamps assumes per-partition ordered
        //    timestamps — confirm, or switch to forBoundedOutOfOrderness.
        SingleOutputStreamOperator<TradeProvinceOrderBean> watermarksDS = beanDS.assignTimestampsAndWatermarks(
                WatermarkStrategy.<TradeProvinceOrderBean>forMonotonousTimestamps()
                        .withTimestampAssigner(new SerializableTimestampAssigner<TradeProvinceOrderBean>() {
                            @Override
                            public long extractTimestamp(TradeProvinceOrderBean element, long recordTimestamp) {
                                return element.getTs();
                            }
                        }));

        // 8. Key by province id and open 10-second tumbling event-time windows.
        KeyedStream<TradeProvinceOrderBean, String> keyedByProvinceDS =
                watermarksDS.keyBy(TradeProvinceOrderBean::getProvinceId);
        WindowedStream<TradeProvinceOrderBean, String, TimeWindow> windowDS =
                keyedByProvinceDS.window(TumblingEventTimeWindows.of(Time.seconds(10)));

        // 9. Aggregate: sum amounts and union distinct order ids per window, then
        //    stamp window start/end and derive the order count from the id set.
        SingleOutputStreamOperator<TradeProvinceOrderBean> reduceDS = windowDS.reduce(new ReduceFunction<TradeProvinceOrderBean>() {
            @Override
            public TradeProvinceOrderBean reduce(TradeProvinceOrderBean value1, TradeProvinceOrderBean value2) throws Exception {
                value1.setOrderAmount(value1.getOrderAmount().add(value2.getOrderAmount()));
                value1.getOrderIdSet().addAll(value2.getOrderIdSet());
                return value1;
            }
        }, new WindowFunction<TradeProvinceOrderBean, TradeProvinceOrderBean, String, TimeWindow>() {
            @Override
            public void apply(String s, TimeWindow window, Iterable<TradeProvinceOrderBean> input, Collector<TradeProvinceOrderBean> out) throws Exception {
                String stt = DateFormatUtil.toYmdHms(window.getStart());
                String edt = DateFormatUtil.toYmdHms(window.getEnd());
                for (TradeProvinceOrderBean bean : input) {
                    bean.setStt(stt);
                    bean.setEdt(edt);
                    // Processing time is used as the record version for the sink.
                    bean.setTs(System.currentTimeMillis());
                    bean.setOrderCount((long) bean.getOrderIdSet().size());
                    out.collect(bean);
                }
            }
        });

        // 10. Asynchronously enrich each bean with the province name from DIM_BASE_PROVINCE.
        //     NOTE(review): "myAsyncFuntion" (typo, lowercase) is the project helper
        //     class name — rename at its declaration site, not here.
        SingleOutputStreamOperator<TradeProvinceOrderBean> provinceNameDS = AsyncDataStream.unorderedWait(
                reduceDS,
                new myAsyncFuntion<TradeProvinceOrderBean>("dim_base_province".toUpperCase()) {
                    @Override
                    public void join(TradeProvinceOrderBean obj, JSONObject dimInfo) {
                        obj.setProvinceName(dimInfo.getString("NAME"));
                    }

                    @Override
                    public String getkey(TradeProvinceOrderBean obj) {
                        return obj.getProvinceId();
                    }
                },
                60, TimeUnit.SECONDS
        );
        provinceNameDS.print();

        // 11. Sink the aggregated rows to ClickHouse.
        provinceNameDS.addSink(ClickHouseUtil.getJdbcSink("insert into dws_trade_province_order_window values(?,?,?,?,?,?,?)"));

        // 12. Launch the job.
        env.execute("dws_trade_province_order_window");
    }
}