package com.bw.gmall.realtime.Day0923;

import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.pool.DruidPooledConnection;
import com.alibaba.fastjson.JSONObject;
import com.bw.gmall.realtime.bean.TradeUserSpuOrderBean;
import com.bw.gmall.realtime.utils.DateFormatUtil;
import com.bw.gmall.realtime.utils.DruidDSUtil;
import com.bw.gmall.realtime.utils.JdbcUtil;
import com.bw.gmall.realtime.utils.MyKafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.time.Duration;
import java.util.HashSet;
import java.util.List;

/*
 * TODO optimizations for the dimension lookup (JdbcUtil.queryList):
 *   - put a Redis side cache (旁路缓存) in front of the JDBC query
 *   - use Flink async I/O
 *   - use multi-threading
 */
public class DwsTradeUserSpuOrderWindow_a {

    public static void main(String[] args) throws Exception {

     //spuid   tmid   a3   a2  a1    订单数   订单综合金额

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();


        //2.设置并行度
        env.setParallelism(1);

        //3.设置检查点


        //4.加载数据
        DataStreamSource<String> streamSource =
                env.addSource(MyKafkaUtil.getFlinkKafkaConsumer("dwd_trade_order_detail", "DwsTradeUserSpuOrderWindow_axxx"));

        //5. 转换成json数据
        SingleOutputStreamOperator<JSONObject> map = streamSource.flatMap(new FlatMapFunction<String, JSONObject>() {
            @Override
            public void flatMap(String value, Collector<JSONObject> out) throws Exception {
                out.collect(JSONObject.parseObject(value));
            }
        });
        //6.保留第一条   保order_deatil_id   order_info_id
        SingleOutputStreamOperator<JSONObject> jsonObjectSingleOutputStreamOperator = map.keyBy(a -> a.getString("id"))
                .process(new KeyedProcessFunction<String, JSONObject, JSONObject>() {
                    ValueState<String> valueState;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        ValueStateDescriptor<String> valueStateDescriptor = new ValueStateDescriptor<>("valueState", String.class);
                        valueStateDescriptor.enableTimeToLive(StateTtlConfig.newBuilder(Time.seconds(5l))
                                .setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite).
                                        build());
                        valueState = getRuntimeContext().getState(valueStateDescriptor);
                    }

                    @Override
                    public void processElement(JSONObject value, Context ctx, Collector<JSONObject> out) throws Exception {
                        String v = valueState.value();

                        if (v == null) {
                            out.collect(value);
                            valueState.update("a");
                        }


                    }
                }).assignTimestampsAndWatermarks(WatermarkStrategy.<JSONObject>forBoundedOutOfOrderness(Duration.ofSeconds(10l))
                        .withTimestampAssigner(new SerializableTimestampAssigner<JSONObject>() {
                            @Override
                            public long extractTimestamp(JSONObject element, long recordTimestamp) {
                                return DateFormatUtil.toTs(element.getString("create_time"), true);
                            }
                        }));

        //7.keyby   spuid   tmid   a3   a2  a1    订单数   订单综合金额
        SingleOutputStreamOperator<TradeUserSpuOrderBean> wTMap = jsonObjectSingleOutputStreamOperator.map(new MapFunction<JSONObject, TradeUserSpuOrderBean>() {
            @Override
            public TradeUserSpuOrderBean map(JSONObject value) throws Exception {
                String sku_id = value.getString("sku_id");
                String user_id = value.getString("user_id");
                String order_id = value.getString("order_id");
                Double split_total_amount = value.getDouble("split_total_amount");
                HashSet<String> set = new HashSet<>();
                set.add(order_id);

                return TradeUserSpuOrderBean.builder()
                        .orderIdSet(set)
                        .skuId(sku_id)
                        .userId(user_id)
                        .orderAmount(split_total_amount)
                        .build();
            }
        });
        SingleOutputStreamOperator<TradeUserSpuOrderBean> wTMapTwo = wTMap.map(new RichMapFunction<TradeUserSpuOrderBean, TradeUserSpuOrderBean>() {
            DruidDataSource druidDataSource = null;

            @Override
            public void open(Configuration parameters) throws Exception {
                druidDataSource = DruidDSUtil.createDataSource();
            }

            @Override
            public TradeUserSpuOrderBean map(TradeUserSpuOrderBean value) throws Exception {
                DruidPooledConnection connection = druidDataSource.getConnection();

                String sku_id = value.getSkuId();

                List<JSONObject> list = JdbcUtil.queryList(connection,
                        "select  *  from   GMALL_REALTIME.DIM_SKU_INFO  where  id='" + sku_id + "'",
                        JSONObject.class, true);
                JSONObject jsonObject = list.get(0);
                System.out.println(jsonObject);
                String spuId = jsonObject.getString("spuId");
                String tmId = jsonObject.getString("tmId");
                String category3Id = jsonObject.getString("category3Id");
                value.setTrademarkId(tmId);
                value.setCategory3Id(category3Id);
                value.setSpuId(spuId);
                return value;
            }
        });
        wTMapTwo.print("主题数据--->");
        //8.方便开窗聚合
        SingleOutputStreamOperator<TradeUserSpuOrderBean> reduce = wTMapTwo.keyBy(new KeySelector<TradeUserSpuOrderBean, Tuple4<String, String, String, String>>() {
            @Override
            public Tuple4<String, String, String, String> getKey(TradeUserSpuOrderBean value) throws Exception {
                return new Tuple4<>(
                        value.getUserId(),
                        value.getSpuId(),
                        value.getTrademarkId(),
                        value.getCategory3Id());
            }
        }).window(TumblingEventTimeWindows.of(org.apache.flink.streaming.api.windowing.time.Time.seconds(10l)))
                .reduce(new ReduceFunction<TradeUserSpuOrderBean>() {
                    @Override
                    public TradeUserSpuOrderBean reduce(TradeUserSpuOrderBean value1, TradeUserSpuOrderBean value2) throws Exception {
                        value1.getOrderIdSet().addAll(value2.getOrderIdSet());

                        value1.setOrderAmount(value1.getOrderAmount() + value2.getOrderAmount());
                        return value1;
                    }
                }, new ProcessWindowFunction<TradeUserSpuOrderBean, TradeUserSpuOrderBean, Tuple4<String, String, String, String>, TimeWindow>() {


                    @Override
                    public void process(Tuple4<String, String, String, String> stringStringStringStringTuple4,
                                        Context context, Iterable<TradeUserSpuOrderBean> input, Collector<TradeUserSpuOrderBean> out) throws Exception {
                        TradeUserSpuOrderBean userSpuOrderBean = input.iterator().next();

                        userSpuOrderBean.setTs(System.currentTimeMillis());
                        userSpuOrderBean.setOrderCount((long) userSpuOrderBean.getOrderIdSet().size());
                        userSpuOrderBean.setStt(DateFormatUtil.toYmdHms(context.window().getStart()));
                        userSpuOrderBean.setEdt(DateFormatUtil.toYmdHms(context.window().getEnd()));

                        out.collect(userSpuOrderBean);
                    }
                });

        reduce.print("------------->");

        //关联维度  spuName
        //关联维度  tmName
        //关联维度  userName
        //关联维度  a3Name
        //关联维度  a2Id
        //关联维度  a1Id


        env.execute();


    }

}