package mn10;


import com.alibaba.fastjson.JSONObject;
import com.bw.gmall.realtime.app.func.DimAsyncFunction;
import com.bw.gmall.realtime.utils.DateFormatUtil;
import com.bw.gmall.realtime.utils.JedisUtil;
import com.bw.gmall.realtime.utils.MyClickHouseUtil;
import com.bw.gmall.realtime.utils.MyKafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.co.ProcessJoinFunction;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.SlidingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;


import java.time.Duration;
import java.util.HashSet;
import java.util.concurrent.TimeUnit;

/**
 * Trade-domain order wide-table job.
 *
 * <p>Pipeline:
 * <ol>
 *   <li>Consume ODS business change-log data (topic_db) from Kafka and split the
 *       order_info / order_detail records into side-output streams, with a 2 s
 *       bounded-out-of-orderness watermark on each.</li>
 *   <li>Interval-join the two order streams (&plusmn;10 s) on
 *       order_info.id == order_detail.order_id into one flattened record.</li>
 *   <li>Asynchronously enrich the joined record with DIM_SKU_INFO dimension data
 *       (adds spu_id).</li>
 *   <li>Key by (province_id, sku_id, user_id), aggregate order count / amount /
 *       quantity per event-time window, and sink the summary to ClickHouse.</li>
 * </ol>
 */
public class DemoTest03 {
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // 1) Raw change-log stream from Kafka.
        DataStreamSource<String> sourceStream =
                env.addSource(MyKafkaUtil.getFlinkKafkaConsumer("topic_db", "topic_db_xxx"));

        // Side-output tags for the two order tables; all other tables flow
        // through the main output (currently unused downstream).
        OutputTag<JSONObject> orderInfoTag = new OutputTag<JSONObject>("order_info") {
        };
        OutputTag<JSONObject> orderDetailTag = new OutputTag<JSONObject>("order_detail") {
        };

        SingleOutputStreamOperator<JSONObject> routedStream =
                sourceStream.process(new ProcessFunction<String, JSONObject>() {
                    @Override
                    public void processElement(String value, Context ctx, Collector<JSONObject> out) throws Exception {
                        JSONObject json = JSONObject.parseObject(value);
                        String table = json.getString("table");
                        // Constant-first equals: no NPE when the "table" field is missing.
                        if ("order_info".equals(table)) {
                            ctx.output(orderInfoTag, json);
                        } else if ("order_detail".equals(table)) {
                            ctx.output(orderDetailTag, json);
                        } else {
                            out.collect(json);
                        }
                    }
                });

        // Shared watermark strategy for both order streams: 2 s out-of-orderness,
        // event time taken from data.create_time. create_time is a date string
        // (it is parsed with DateFormatUtil.toTs further down this pipeline), so
        // parse it here too instead of getLong(), which would yield null for a
        // non-numeric value and NPE on unboxing.
        WatermarkStrategy<JSONObject> orderWatermarks = WatermarkStrategy
                .<JSONObject>forBoundedOutOfOrderness(Duration.ofSeconds(2L))
                .withTimestampAssigner(new SerializableTimestampAssigner<JSONObject>() {
                    @Override
                    public long extractTimestamp(JSONObject element, long recordTimestamp) {
                        return DateFormatUtil.toTs(element.getJSONObject("data").getString("create_time"), true);
                    }
                });

        SingleOutputStreamOperator<JSONObject> orderDetailStream =
                routedStream.getSideOutput(orderDetailTag).assignTimestampsAndWatermarks(orderWatermarks);

        SingleOutputStreamOperator<JSONObject> orderInfoStream =
                routedStream.getSideOutput(orderInfoTag).assignTimestampsAndWatermarks(orderWatermarks);

        // 2) Interval join: order_info.id == order_detail.order_id, within ±10 s.
        SingleOutputStreamOperator<JSONObject> joinedStream = orderInfoStream
                .keyBy(info -> info.getJSONObject("data").getString("id"))
                .intervalJoin(orderDetailStream.keyBy(detail -> detail.getJSONObject("data").getString("order_id")))
                .between(Time.seconds(-10), Time.seconds(10))
                .process(new ProcessJoinFunction<JSONObject, JSONObject, JSONObject>() {
                    @Override
                    public void processElement(JSONObject left, JSONObject right, Context ctx, Collector<JSONObject> out) throws Exception {
                        JSONObject info = left.getJSONObject("data");
                        JSONObject detail = right.getJSONObject("data");

                        // Flattened record: order_detail columns + order_info columns.
                        JSONObject result = new JSONObject();
                        result.put("id", detail.getString("id"));
                        result.put("order_id", detail.getString("order_id"));
                        result.put("user_id", info.getString("user_id"));
                        result.put("sku_id", detail.getString("sku_id"));
                        result.put("sku_name", detail.getString("sku_name"));
                        result.put("sku_num", detail.getString("sku_num"));
                        result.put("create_time", detail.getString("create_time"));
                        result.put("split_total_amount", detail.getString("split_total_amount"));
                        result.put("province_id", info.getString("province_id"));
                        result.put("order_status", info.getString("order_status"));
                        out.collect(result);
                    }
                });

        // 3) Async dimension enrichment from DIM_SKU_INFO, keyed by sku_id.
        //    TODO(review): the spec also asks for the enriched records to be
        //    written to Kafka topic dwd_trade_orders; no Kafka sink is present.
        SingleOutputStreamOperator<JSONObject> enrichedStream = AsyncDataStream.unorderedWait(
                joinedStream,
                new DimAsyncFunction<JSONObject>("DIM_SKU_INFO") {
                    @Override
                    public String getKey(JSONObject input) {
                        return input.getString("sku_id");
                    }

                    @Override
                    public void join(JSONObject input, JSONObject dimInfo) {
                        input.put("spu_id", dimInfo.getString("SPU_ID"));
                    }
                },
                // NOTE(review): a 1000-SECOND async timeout looks excessive — it
                // was probably meant to be milliseconds. Kept as-is; confirm.
                1000, TimeUnit.SECONDS);

        // 4) Map each enriched record to the aggregation bean.
        //    Columns: stt edt province_id sku_id user_id order_count total_count_money sku_num
        SingleOutputStreamOperator<SkuUserBean> beanStream =
                enrichedStream.flatMap(new FlatMapFunction<JSONObject, SkuUserBean>() {
                    @Override
                    public void flatMap(JSONObject value, Collector<SkuUserBean> out) throws Exception {
                        // Per-record set of order ids; the window reduce unions
                        // these so the final size is the distinct order count.
                        HashSet<Long> orderIds = new HashSet<>();
                        orderIds.add(value.getLong("order_id"));
                        Long ts = DateFormatUtil.toTs(value.getString("create_time"), true);
                        out.collect(new SkuUserBean(
                                null,                                  // stt — set in the window function
                                null,                                  // edt — set in the window function
                                value.getLong("province_id"),
                                value.getLong("sku_id"),
                                value.getLong("user_id"),
                                orderIds,
                                0L,                                    // order_count — set in the window function
                                value.getDouble("split_total_amount"),
                                value.getLong("sku_num"),
                                ts));
                    }
                });

        SingleOutputStreamOperator<SkuUserBean> beanStreamWithWm = beanStream.assignTimestampsAndWatermarks(
                WatermarkStrategy.<SkuUserBean>forBoundedOutOfOrderness(Duration.ofSeconds(1L))
                        .withTimestampAssigner(new SerializableTimestampAssigner<SkuUserBean>() {
                            @Override
                            public long extractTimestamp(SkuUserBean element, long recordTimestamp) {
                                return element.getTs();
                            }
                        }));

        // Key by (province_id, sku_id, user_id).
        KeyedStream<SkuUserBean, Tuple3<Long, Long, Long>> keyedBeans =
                beanStreamWithWm.keyBy(new KeySelector<SkuUserBean, Tuple3<Long, Long, Long>>() {
                    @Override
                    public Tuple3<Long, Long, Long> getKey(SkuUserBean value) throws Exception {
                        return Tuple3.of(value.getProvince_id(), value.getSku_id(), value.getUser_id());
                    }
                });

        // TODO(review): the spec asks for a 1-day sliding window firing every
        // second (SlidingEventTimeWindows.of(Time.days(1), Time.seconds(1)));
        // the 10 s tumbling window below is kept as in the original test setup.
        SingleOutputStreamOperator<SkuUserBean> aggregated = keyedBeans
                .window(TumblingEventTimeWindows.of(Time.seconds(10L)))
                .reduce(new ReduceFunction<SkuUserBean>() {
                    @Override
                    public SkuUserBean reduce(SkuUserBean value1, SkuUserBean value2) throws Exception {
                        // Union order-id sets (distinct order count), sum quantity
                        // and amount into the first bean.
                        value1.getSets().addAll(value2.getSets());
                        value1.setSku_num(value1.getSku_num() + value2.getSku_num());
                        value1.setTotal_count_money(value1.getTotal_count_money() + value2.getTotal_count_money());
                        return value1;
                    }
                }, new ProcessWindowFunction<SkuUserBean, SkuUserBean, Tuple3<Long, Long, Long>, TimeWindow>() {
                    @Override
                    public void process(Tuple3<Long, Long, Long> key, Context context,
                                        Iterable<SkuUserBean> elements, Collector<SkuUserBean> out) throws Exception {
                        // Exactly one pre-reduced element per key/window.
                        SkuUserBean bean = elements.iterator().next();
                        bean.setOrder_count((long) bean.getSets().size());
                        bean.setTs(System.currentTimeMillis()); // processing time of emission
                        bean.setStt(DateFormatUtil.toYmdHms(context.window().getStart()));
                        bean.setEdt(DateFormatUtil.toYmdHms(context.window().getEnd()));
                        out.collect(bean);
                    }
                });

        aggregated.print("--------->");

        // 5) Sink the window summaries to ClickHouse. The target table uses the
        //    ReplacingMergeTree engine, so rows re-emitted for the same primary
        //    key replace earlier ones.
        aggregated.addSink(MyClickHouseUtil.getSinkFunction("insert  into   sku_user values(?,?,?,?,?,?,?,?,?)"));

        env.execute();
    }
}
