package com.atguigu.app.dws;


import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.bean.TradeSkuOrderBean;
import com.atguigu.func.DimAsyncJoinFunction;
import com.atguigu.utils.DateFormatUtil;
import com.atguigu.utils.KafkaUtil;
import com.atguigu.utils.MyClickHouseUtil;
import org.apache.commons.beanutils.BeanUtils;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.TimerService;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.math.BigDecimal;
import java.time.Duration;
import java.util.concurrent.TimeUnit;

//数据流：web/app -> mysql -> maxwell -> kafka(ods) -> flinkApp(订单明细关联后的事实表) -> kafka(dwd) ->flinkApp -> Clickhouse(DWS)
//程 序：Mock      ->mysql -> maxwell -> kafka(zk) -> DwdTradeOrderDetail ->kafka(zk) ->Dws09TradeSkuOrderWindow(redis,hdfs,zk,hbase,phoenix)->clickhouse(zk)
/*
10.9 交易域商品粒度下单各窗口汇总表
10.9.1 主要任务
从 Kafka 订单明细主题读取数据，过滤null数据，按照唯一键去重，分组开窗聚合，统计各维度各窗口的原始金额、活动减免金额、优惠券减免金额和订单金额，补全维度信息，将数据写入 ClickHouse 交易域SKU粒度下单各窗口汇总表。

 */
//todo 1.获取执行环境
//todo 2.读取kafka订单明细主题数据创建流
//todo 3.过滤null值并转化为json对象
//todo 4.提取时间戳生成watermark
//todo 5.过滤由left join产生的重复数据
//todo 6.将数据转化为javabean对象
//todo 7.分组开窗聚合
//todo 8.关联phoenix的维表补充维度信息
//todo 9.将数据写到clickhouse
//todo 10.启动任务

//方案三：left join产生的重复数据问题：下游需求是累加类指标，既要左表也要右表累加字段，将第一条数据存入状态并注册事件时间定时器（watermark+5s），定时器触发时输出ts最大的那条数据
//假设要的累加字段有左表也有右表的
//本案例假设order_amount为右表1的数据

//1001,a,b,ts2
//1001,a,null,ts1
//定时器响了，输出ts2对应的数据


public class Dws09TradeSkuOrderWindow04 {
    public static void main(String[] args) throws Exception {
        //todo 1.获取执行环境
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        //todo 生产环境一定要写，测试注释掉，否则每次测试都得开hdfs
//        需要从checkpoint或savepoint启动程序
//        //2.1 开启checkpoint，每隔5s钟做一次ck，并指定ck的一致性语义
//        env.enableCheckpointing(3000L, CheckpointingMode.EXACTLY_ONCE);//exactly once：默认barrier对齐
//        //2.2 设置超时时间为1min
//        env.getCheckpointConfig().setCheckpointTimeout(60*1000L);//设置超时时间设置checkpoint的超时时间为1min，是指做一次checkpoint的时间；如果超时则认为本次checkpoint失败，这个checkpoint就丢了，继续一下一次checkpoint即可
//        //2.3设置两次重启的最小时间间隔为3s
//        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
//        //2.4设置任务关闭的时候保留最后一次ck数据
//        env.getCheckpointConfig().enableExternalizedCheckpoints(
//                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION
//        );
//        //2.5 指定从ck自动重启策略
//        env.setRestartStrategy(RestartStrategies.failureRateRestart(
//                3, Time.days(1L),Time.minutes(1L)
//        ));
//        //2.6 设置状态后端
//        env.setStateBackend(new HashMapStateBackend());//本地状态位置
//        env.getCheckpointConfig().setCheckpointStorage(
//                "hdfs://hadoop102:8020/flinkCDC/220828"
//        );//checkpoint状态位置
//        //2.7 设置访问HDFS的用户名
//        System.setProperty("HADOOP_USER_NAME","atguigu");

        //todo 2.读取kafka dwd订单明细主题数据创建流（读过来的数据有null值，没有过滤）
        DataStreamSource<String> kafkaDS = env.addSource(KafkaUtil.getFlinkKafkaConsumer("dwd_trade_order_detail", "sku_order_220828"));

        //todo 3.过滤null值并转化为json对象
        SingleOutputStreamOperator<JSONObject> jsonObjDS = kafkaDS.flatMap(new FlatMapFunction<String, JSONObject>() {
            @Override
            public void flatMap(String value, Collector<JSONObject> out) throws Exception {
                if (value != null) {
                    out.collect(JSON.parseObject(value));
                }
            }
        });

        //todo 4.提取时间戳生成watermark
        SingleOutputStreamOperator<JSONObject> jsonObjWithWMDS = jsonObjDS.assignTimestampsAndWatermarks(
                WatermarkStrategy
                        .<JSONObject>forBoundedOutOfOrderness(Duration.ofSeconds(2))
                        .withTimestampAssigner(new SerializableTimestampAssigner<JSONObject>() {
                            @Override
                            public long extractTimestamp(JSONObject element, long recordTimestamp) {
                                String create_time = element.getString("create_time");
                                return DateFormatUtil.toTs(create_time, true);
                            }
                        })
        );

        //todo 5.过滤由left join产生的重复数据(null在上面过滤了，现在有左null和左右),并将数据转化为javabean对象
//方案三：left join产生的重复数据问题：下游需求是累加类指标，那么后到的数据将相关字段置为0写出
        SingleOutputStreamOperator<TradeSkuOrderBean> tradeSkuOrderDS = jsonObjWithWMDS.keyBy(json -> json.getString("id"))//todo 按照upsert kafka建表时的主键order_detail_id分组的
                //定时器只能用process
                .process(new KeyedProcessFunction<String, JSONObject, TradeSkuOrderBean>() {
                    private ValueState<JSONObject> valueState;

                    @Override
                    public void open(Configuration parameters) throws Exception {
//                        ValueStateDescriptor<JSONObject> stateDescriptor = new ValueStateDescriptor<>("order-state", JSONObject.class);
//                        StateTtlConfig ttlConfig = new StateTtlConfig.Builder(Time.seconds(10))//因为dwd order_detail表状态设置的是10s
//                                .setUpdateType(StateTtlConfig.UpdateType.OnReadAndWrite)//join操作有可能第一条跟第二条延迟9s，第二条跟第三条延迟9s
//                                .build();
//                        stateDescriptor.enableTimeToLive(ttlConfig);

//                        valueState = getRuntimeContext().getState(stateDescriptor);

                        //因为有定时器，就不需要ttl了，因为时间到了，我们要输出这条数据，定时器可以输出数据，也可以销毁状态，ttl只管销毁状态，到了无法输出数据
                        valueState = getRuntimeContext().getState(new ValueStateDescriptor<JSONObject>("order-state", JSONObject.class));
                    }

                    @Override
                    public void processElement(JSONObject value, Context ctx, Collector<TradeSkuOrderBean> out) throws Exception {
                        //取出状态数据
                        JSONObject state = valueState.value();
                        if (state == null) {
                            //如果状态为null，直接把数据更新进状态里
                            valueState.update(value);
                            //同时要注册定时器
                            TimerService timerService = ctx.timerService();
                            timerService.registerEventTimeTimer(timerService.currentWatermark() + 5000L);//5s为乱序程度,如果很多表left join产生了很多个重复数据，就要将5s调大一些
                        } else {
                            //如果状态不为null，比较当前ts与状态里的ts值
                            Long stateTs = state.getLong("ts");
                            Long curTs = value.getLong("ts");
                            //如果当前ts大就将当前数据更新到状态里
                            if (curTs > stateTs) {
                                valueState.update(value);
                            }
                        }
                    }

                    //定时器响了，会调用这个方法
                    @Override
                    public void onTimer(long timestamp, OnTimerContext ctx, Collector<TradeSkuOrderBean> out) throws Exception {
                        //取出状态数据
                        JSONObject value = valueState.value();
                        out.collect(TradeSkuOrderBean.builder()
                                .originalAmount(value.getBigDecimal("split_original_amount"))
                                .orderAmount(value.getBigDecimal("split_total_amount"))
                                .build());

                        //清空状态
                        valueState.clear();


                    }

                });


        //todo 7.分组开窗聚合 (求商品粒度，即需要keyby sku_id，然后做开窗聚合运算)
        SingleOutputStreamOperator<TradeSkuOrderBean> reducedDS = tradeSkuOrderDS.keyBy(s -> s.getSkuId())
                .window(TumblingEventTimeWindows.of(org.apache.flink.streaming.api.windowing.time.Time.seconds(10)))
                .reduce(new ReduceFunction<TradeSkuOrderBean>() {
                    @Override
                    public TradeSkuOrderBean reduce(TradeSkuOrderBean value1, TradeSkuOrderBean value2) throws Exception {
                        value1.setOriginalAmount(value1.getOriginalAmount().add(value2.getOriginalAmount()));
                        value1.setActivityAmount(value1.getActivityAmount().add(value2.getActivityAmount()));
                        value1.setCouponAmount(value1.getCouponAmount().add(value2.getCouponAmount()));
                        value1.setOrderAmount(value1.getOrderAmount().add(value2.getOriginalAmount()));

                        return value1;
                    }
                }, new WindowFunction<TradeSkuOrderBean, TradeSkuOrderBean, String, TimeWindow>() {
                    @Override
                    public void apply(String s, TimeWindow window, Iterable<TradeSkuOrderBean> input, Collector<TradeSkuOrderBean> out) throws Exception {
                        //获取数据
                        TradeSkuOrderBean next = input.iterator().next();
                        //补充信息
                        next.setTs(System.currentTimeMillis());
                        next.setEdt(DateFormatUtil.toYmdHms(window.getEnd()));
                        next.setStt(DateFormatUtil.toYmdHms(window.getStart()));
                        //输出数据
                        out.collect(next);

                    }
                });

        reducedDS.print("reduced>>>>");//订单明细表对sku_id keyBy，然后开创聚合

        //todo 8.关联phoenix的维表补充维度信息-->（也可以用richmap,要在open方法里创建连接池，连接phoenix，用来关联维表）
//        reducedDS.map(new MapFunction<TradeSkuOrderBean, TradeSkuOrderBean>() {
//
//
//
//            @Override
//            public TradeSkuOrderBean map(TradeSkuOrderBean value) throws Exception {
//                //todo 获取phoenix连接
//                DruidPooledConnection phoenixConn = DruidDSUtil.getPhoenixConn();
//
//                //todo 读取phoenix信息，将维表信息补充到value上  select * from db.dim_xxx_xxx where id=''
//
//                return value;
//
//            }
//        });
        //关联sku(获取spu_id和tm_id) 异步实现的是：同时有a b c d查sku的信息，异步查，而对于a，sku和spu查询是同步的，即查完了sku才能查spu
        SingleOutputStreamOperator<TradeSkuOrderBean> reducedWithSkuDS = AsyncDataStream.unorderedWait(
                reducedDS,
                new DimAsyncJoinFunction<TradeSkuOrderBean>("DIM_SKU_INFO") {
                    @Override
                    public String getKey(TradeSkuOrderBean input) {
                        return input.getSkuId();
                    }

                    @Override
                    public void join(TradeSkuOrderBean input, JSONObject dimInfo) {
                        input.setSkuName(dimInfo.getString("SKU_NAME"));
                        input.setSpuId(dimInfo.getString("SPU_ID"));//将SPU_ID设置进javabean里，下游就可以根据这个id查询到DIM_SPU_INFO表的对应行的信息了
                        input.setTrademarkId(dimInfo.getString("TM_ID"));
                        input.setCategory3Id(dimInfo.getString("CATEGORY3_ID"));//将CATEGORY3_ID设置进javabean里，下游就可以根据这个id查询到DIM_BASE_CATEGORY3对应行的信息了
                    }
                },
                60,
                TimeUnit.SECONDS);
        reducedWithSkuDS.print("reducedWithSkuDS>>>>");

        //关联spu
        SingleOutputStreamOperator<TradeSkuOrderBean> reducedWithSpuDS = AsyncDataStream.unorderedWait(
                reducedWithSkuDS,//注意此时的流是关联sku_info表后的流
                new DimAsyncJoinFunction<TradeSkuOrderBean>("DIM_SPU_INFO") {
                    @Override
                    public String getKey(TradeSkuOrderBean input) {
                        return input.getSpuId();//拿上游设置进去的SPU_ID(也就是关联字段，即DIM_SPU_INFO表的主键)
                    }

                    @Override
                    public void join(TradeSkuOrderBean input, JSONObject dimInfo) {
                        input.setSpuName(dimInfo.getString("SPU_NAME"));//将SPU_NAME对应的value值补充到javabean里
                    }
                }
                , 60, TimeUnit.SECONDS);

        //关联trademark
        SingleOutputStreamOperator<TradeSkuOrderBean> reducedWithTmDS = AsyncDataStream.unorderedWait(reducedWithSpuDS,
                new DimAsyncJoinFunction<TradeSkuOrderBean>("DIM_BASE_TRADEMARK") {
                    @Override
                    public String getKey(TradeSkuOrderBean input) {
                        return input.getTrademarkId();//用来关联BASE_TRADEMARK表的关联字段，也是base_trademark维表的主键
                    }

                    @Override
                    public void join(TradeSkuOrderBean input, JSONObject dimInfo) {
                        input.setTrademarkName(dimInfo.getString("TM_NAME"));

                    }

                }, 60, TimeUnit.SECONDS);
        //关联category3（categery3、2、1关联的顺序不能变!!!!!）----要把CATEGORY2_ID设置进去，用于下游关联c2表获取相应行的信息
        SingleOutputStreamOperator<TradeSkuOrderBean> reducedWithC3DS = AsyncDataStream.unorderedWait(reducedWithTmDS, new DimAsyncJoinFunction<TradeSkuOrderBean>("DIM_BASE_CATEGORY3") {
            @Override
            public String getKey(TradeSkuOrderBean input) {
                return input.getCategory3Id();//获取上游的c3_id,根据这个维表主键（关联字段）去查询c3的信息
            }

            @Override
            public void join(TradeSkuOrderBean input, JSONObject dimInfo) {
                input.setCategory3Name(dimInfo.getString("NAME"));//即c3 id对应的name
                input.setCategory2Id(dimInfo.getString("CATEGORY2_ID"));//要将查询出来的c2_id设置进去，下游c2通过这个id去关联获取对应行的信息

            }
        }, 60, TimeUnit.SECONDS);

        //关联category2
        SingleOutputStreamOperator<TradeSkuOrderBean> reducedWithC2DS = AsyncDataStream.unorderedWait(reducedWithC3DS, new DimAsyncJoinFunction<TradeSkuOrderBean>("DIM_BASE_CATEGORY2") {
            @Override
            public String getKey(TradeSkuOrderBean input) {
                return input.getCategory2Id();//获取上游设置的c2_id,根据这个维表主键（关联字段）去查询c2的信息
            }

            @Override
            public void join(TradeSkuOrderBean input, JSONObject dimInfo) {

                input.setCategory2Name(dimInfo.getString("NAME"));//将C2的NAME字段从redis或phoenix查询出来补充个进javabean里
                input.setCategory1Id(dimInfo.getString("CATEGORY1_ID"));//将查出来的CATEGORY1_ID设置进javabean里，用于跟下游c1表关联
            }
        }, 60, TimeUnit.SECONDS);


        //关联category1

        SingleOutputStreamOperator<TradeSkuOrderBean> resultDS = AsyncDataStream.unorderedWait(reducedWithC2DS, new DimAsyncJoinFunction<TradeSkuOrderBean>("DIM_BASE_CATEGORY1") {
            @Override
            public String getKey(TradeSkuOrderBean input) {
                return input.getCategory1Id();//获取上游设置进去的c1_id用来跟DIM_BASE_CATEGORY1关联获取相应行的信息
            }

            @Override
            public void join(TradeSkuOrderBean input, JSONObject dimInfo) {

                input.setCategory1Name(dimInfo.getString("NAME"));//将查询的c1表的NAME取出来补充大JavaBean里

            }
        }, 60, TimeUnit.SECONDS);

        resultDS.print("多表关联后的商品粒度信息");

        //todo 9.将数据写到clickhouse
        resultDS.addSink(MyClickHouseUtil.getSinkFunction("insert into dws_trade_sku_order_window values(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)"));
        //todo 10.启动任务
        env.execute("Dws09TradeSkuOrderWindow");
    }
}
