package com.atguigu.gmall.realtime.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.app.dws.func.AbsDimAsyncFuntion;
import com.atguigu.gmall.realtime.bean.TradeProvinceOrderBean;
import com.atguigu.gmall.realtime.bean.TradeSkuOrderBean;
import com.atguigu.gmall.realtime.util.ClickhouseUtil;
import com.atguigu.gmall.realtime.util.DateFormatUtil;
import com.atguigu.gmall.realtime.util.MyKafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.math.BigDecimal;
import java.time.Duration;
import java.util.HashSet;
import java.util.concurrent.TimeUnit;

public class DwsTradeProvinceOrderWindow {


    //1 数据库： dwd层订单明细主题
    //2 转换数据结构  jsonobj (方便提取)
    //3 keyby   order_detail_id
    //4 解决因为上游回撤流产生的数据重复问题
    //5   转为Bean, 以clickhouse表结构 为依据
    //6   水位线
    //7  keyby  province_id
    //8  开窗     滚动窗口  聚合订单数量和金额 ， 订单金额直接加总即可， 但是订单个数如何统计？
    //9 状态 把数据保存下来 ，判断如果来过状态有数据，把历史数据做负值处理，实现数据的回撤。
    //10  关联维度表    province_id  关联省份表   旁路缓存查询+AsyncIO
    //11  合并省份名称


    //12  写入clickhouse
    public static void main(String[] args) throws Exception {
        //1 数据库： dwd层订单明细主题
        //2 转换数据结构  jsonobj (方便提取)

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);

        String sourceTopic="dwd_trade_order_detail";
        String groupId="dws_trade_province_order_window";
        DataStreamSource<String> kafkaStream = env.addSource(MyKafkaUtil.getKafkaConsumer(sourceTopic, groupId));
        SingleOutputStreamOperator<JSONObject> jsonObjStream = kafkaStream.map(JSON::parseObject);


        //3 keyby   order_detail_id
        KeyedStream<JSONObject, String> detailIdKeyedStream = jsonObjStream.keyBy(jsonObj -> jsonObj.getString("id"));//订单明细id


        //4 解决因为上游回撤流产生的数据重复问题
        SingleOutputStreamOperator<JSONObject> orderDetailFilteredJsonObj = detailIdKeyedStream.process(new KeyedProcessFunction<String, JSONObject, JSONObject>() {
            // 上次的访问数据
            ValueStateDescriptor<JSONObject> lastOrderDetailJsonObjStateDesc = new ValueStateDescriptor<JSONObject>("last_order_detail_jsonobj", JSONObject.class);

            ValueState<JSONObject> lastOrderDetailJsonObjState = null;

            @Override
            public void open(Configuration parameters) throws Exception {
                lastOrderDetailJsonObjState = getRuntimeContext().getState(lastOrderDetailJsonObjStateDesc);
            }

            @Override
            public void processElement(JSONObject newJsonObject, KeyedProcessFunction<String, JSONObject, JSONObject>.Context ctx, Collector<JSONObject> out) throws Exception {
                // 判断是否有上次访问状态，如果有 生成一个反向数值的操作  放入收集器
                // 新的数据写入状态  放入收集器
                if (lastOrderDetailJsonObjState.value() != null && lastOrderDetailJsonObjState.value().size() > 0) {
                    JSONObject lastOrderJsonObj = lastOrderDetailJsonObjState.value();
                    BigDecimal splitTotalAmountNg = lastOrderJsonObj.getBigDecimal("split_total_amount").negate();
                    lastOrderJsonObj.put("split_total_amount", splitTotalAmountNg);
                    out.collect(lastOrderJsonObj);
                }
                lastOrderDetailJsonObjState.update(newJsonObject);
                out.collect(newJsonObject);
            }
        });

        //5   转为Bean, 以clickhouse表结构 为依据

        SingleOutputStreamOperator<TradeProvinceOrderBean> tradeProvinceOrderStream = orderDetailFilteredJsonObj.map(new MapFunction<JSONObject, TradeProvinceOrderBean>() {
            @Override
            public TradeProvinceOrderBean map(JSONObject jsonObject) throws Exception {
                HashSet<Object> orderIdSet = new HashSet<>();
                orderIdSet.add(jsonObject.getString("order_id"));
                TradeProvinceOrderBean tradeProvinceOrderBean = TradeProvinceOrderBean.builder()
                        .provinceId(jsonObject.getString("province_id"))
                        .orderIdSet(orderIdSet)
                        .orderAmount(jsonObject.getBigDecimal("split_total_amount"))
                        .ts(jsonObject.getLong("ts"))
                        .build();
                return tradeProvinceOrderBean;
            }
        });

       // tradeProvinceOrderStream.print();

        //6   水位线
        SingleOutputStreamOperator<TradeProvinceOrderBean> tradeProvinceOrderWmStream = tradeProvinceOrderStream.assignTimestampsAndWatermarks(WatermarkStrategy.<TradeProvinceOrderBean>forBoundedOutOfOrderness(Duration.ofMillis(1000)).withIdleness(Duration.ofMillis(10000))
                .withTimestampAssigner(new SerializableTimestampAssigner<TradeProvinceOrderBean>() {
                    @Override
                    public long extractTimestamp(TradeProvinceOrderBean bean, long recordTimestamp) {
                        return bean.getTs()*1000;
                    }
                }));

        //7 keyby province_id
        KeyedStream<TradeProvinceOrderBean, String> provincekeyedStream = tradeProvinceOrderWmStream.keyBy(new KeySelector<TradeProvinceOrderBean, String>() {
            @Override
            public String getKey(TradeProvinceOrderBean bean) throws Exception {
                return bean.getProvinceId();
            }
        });

        //8 开窗
        SingleOutputStreamOperator<TradeProvinceOrderBean> reduceStream = provincekeyedStream.window(TumblingEventTimeWindows.of(Time.seconds(10))).allowedLateness(Time.seconds(10))
                .reduce(new ReduceFunction<TradeProvinceOrderBean>() {
                    @Override
                    public TradeProvinceOrderBean reduce(TradeProvinceOrderBean bean1, TradeProvinceOrderBean bean2) throws Exception {
                        bean1.setOrderAmount(bean1.getOrderAmount().add(bean2.getOrderAmount()));
                        bean1.getOrderIdSet().addAll(bean2.getOrderIdSet());  //订单编号去重
                        return bean1;
                    }
                }, new WindowFunction<TradeProvinceOrderBean, TradeProvinceOrderBean, String, TimeWindow>() {
                    @Override
                    public void apply(String s, TimeWindow window, Iterable<TradeProvinceOrderBean> beanItr, Collector<TradeProvinceOrderBean> out) throws Exception {
                        String stt = DateFormatUtil.toYmdHms(window.getStart());
                        String edt = DateFormatUtil.toYmdHms(window.getEnd());
                        for (TradeProvinceOrderBean bean : beanItr) {
                            bean.setStt(stt);
                            bean.setEdt(edt);
                            bean.setTs(System.currentTimeMillis());
                            bean.setOrderCount(bean.getOrderIdSet().size() + 0L);
                            out.collect(bean);
                        }
                    }
                });

        //9 维度 base_province
        SingleOutputStreamOperator<TradeProvinceOrderBean> tradeProvinceWithNameStream = AsyncDataStream.unorderedWait(reduceStream, new AbsDimAsyncFuntion<TradeProvinceOrderBean>("dim_base_province") {
            @Override
            public String getJoinId(TradeProvinceOrderBean bean) {
                return bean.getProvinceId();
            }

            @Override
            public void join(TradeProvinceOrderBean bean, JSONObject dimObject) {
                bean.setProvinceName(dimObject.getString("name"));
            }
        }, 30, TimeUnit.SECONDS, 100);


        // tradeProvinceWithNameStream.print();


        tradeProvinceWithNameStream.addSink(ClickhouseUtil.getSink("insert into dws_trade_province_order_window values (?,?,?,?,?,?,?)"));

        env.execute();
    }
}
