package com.atguigu.gmall.realtime.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.bean.TradeOrderBean;
import com.atguigu.gmall.realtime.util.DateFormatUtil;
import com.atguigu.gmall.realtime.util.MyClickHouseUtil;
import com.atguigu.gmall.realtime.util.MyKafkaUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.AllWindowedStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.AllWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;

import java.time.Duration;

/**
 * Author: Felix
 * Date: 2022/6/22
 * Desc: Trade domain - statistics of first-time order users and daily unique order users.
 * Processes that must be running:
 *      zk, kafka, maxwell, clickhouse,
 *      DwdTradeOrderPreProcess, DwdTradeOrderDetail, DwsTradeOrderWindow
 * End-to-end flow:
 *      run the jar that simulates business data generation
 *      rows are inserted into the business database
 *      binlog records every change in the business database
 *      maxwell reads the changes from binlog, wraps them as JSON strings and sends them to kafka topic_db
 *      DwdTradeOrderPreProcess reads order / order-detail / order-detail-activity / order-detail-coupon
 *          records from topic_db, joins them with the dictionary table and writes to a pre-process topic
 *      DwdTradeOrderDetail reads the pre-process topic, keeps only the insert operations (these are the
 *          order-detail rows) and writes them to the order topic
 *      DwsTradeOrderWindow reads the order topic and filters out unique users and first-time order users
 */
public class DwsTradeOrderWindow {
    public static void main(String[] args) throws Exception {
        //TODO 1. Basic environment setup
        //1.1 stream execution environment
        StreamExecutionEnvironment execEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        //1.2 parallelism
        execEnv.setParallelism(4);

        //TODO 2. Checkpoint settings (omitted here)

        //TODO 3. Read the order-detail stream from kafka
        //3.1 source topic and consumer group
        String sourceTopic = "dwd_trade_order_detail";
        String consumerGroup = "dws_trade_order_detail_group";
        //3.2 consumer
        FlinkKafkaConsumer<String> consumer = MyKafkaUtil.getKafkaConsumer(sourceTopic, consumerGroup);
        //3.3 wrap the consumer into a source stream
        DataStreamSource<String> rawStream = execEnv.addSource(consumer);

        //TODO 4. Convert each record: json string -> JSONObject
        SingleOutputStreamOperator<JSONObject> jsonStream = rawStream.map(JSON::parseObject);

        /*{"create_time":"2022-06-22 16:37:00","sku_num":"3","split_original_amount":"69.0000","sku_id":"25",
        "date_id":"2022-06-22","source_type_name":"商品推广","user_id":"74","province_id":"31","source_type_code":"2402",
        "row_op_ts":"2022-06-22 08:36:59.887Z","sku_name":"金沙河面条 银丝挂面900g*3包 爽滑 细面条 龙须面 速食面","id":"494",
        "source_id":"79","order_id":"227","split_total_amount":"69.0","ts":"1655887020"}*/
        //jsonStream.print(">>>>");

        //TODO 5. Assign watermarks and extract the event-time field
        // "ts" is in seconds, Flink event time is in milliseconds
        SingleOutputStreamOperator<JSONObject> watermarkedStream = jsonStream.assignTimestampsAndWatermarks(
            WatermarkStrategy
                .<JSONObject>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                .withTimestampAssigner(
                    new SerializableTimestampAssigner<JSONObject>() {
                        @Override
                        public long extractTimestamp(JSONObject obj, long recordTimestamp) {
                            return obj.getLong("ts") * 1000;
                        }
                    }
                )
        );

        //TODO 6. Key the stream by user id
        KeyedStream<JSONObject, String> byUserStream = watermarkedStream.keyBy(obj -> obj.getString("user_id"));

        //TODO 7. Keyed state: detect unique-per-day users and first-ever order users
        SingleOutputStreamOperator<TradeOrderBean> orderUserStream = byUserStream.process(
            new KeyedProcessFunction<String, JSONObject, TradeOrderBean>() {
                // date string ("yyyy-MM-dd") of this user's most recent order; null means never ordered
                private ValueState<String> lastDateState;

                @Override
                public void open(Configuration parameters) throws Exception {
                    ValueStateDescriptor<String> descriptor
                        = new ValueStateDescriptor<>("lastOrderDateState", String.class);
                    lastDateState = getRuntimeContext().getState(descriptor);
                }

                @Override
                public void processElement(JSONObject obj, Context ctx, Collector<TradeOrderBean> out) throws Exception {
                    // date of the user's previous order (null/empty if none)
                    String prevOrderDate = lastDateState.value();
                    // date of the current order, derived from the event timestamp
                    long eventTs = obj.getLong("ts") * 1000;
                    String curDate = DateFormatUtil.toDate(eventTs);

                    Long uniqueUserCt = 0L;
                    Long firstOrderCt = 0L;
                    if (StringUtils.isEmpty(prevOrderDate)) {
                        // no prior order at all: counts both as today's unique user and as a first-ever user
                        uniqueUserCt = 1L;
                        firstOrderCt = 1L;
                        lastDateState.update(curDate);
                    } else if (!prevOrderDate.equals(curDate)) {
                        // first order of the current day, but not the first ever
                        uniqueUserCt = 1L;
                        lastDateState.update(curDate);
                    }

                    // forward only rows that contribute to at least one metric
                    if (uniqueUserCt != 0L || firstOrderCt != 0L) {
                        out.collect(new TradeOrderBean("", "", uniqueUserCt, firstOrderCt, eventTs));
                    }
                }
            }
        );

        //TODO 8. Tumbling 10s event-time window over the whole stream
        AllWindowedStream<TradeOrderBean, TimeWindow> windowedStream = orderUserStream.windowAll(TumblingEventTimeWindows.of(Time.seconds(10)));

        //TODO 9. Aggregate inside each window
        SingleOutputStreamOperator<TradeOrderBean> aggStream = windowedStream.reduce(
            new ReduceFunction<TradeOrderBean>() {
                @Override
                public TradeOrderBean reduce(TradeOrderBean acc, TradeOrderBean cur) throws Exception {
                    // incrementally sum both counters into the accumulator
                    acc.setOrderUniqueUserCount(acc.getOrderUniqueUserCount() + cur.getOrderUniqueUserCount());
                    acc.setOrderNewUserCount(acc.getOrderNewUserCount() + cur.getOrderNewUserCount());
                    return acc;
                }
            },
            new AllWindowFunction<TradeOrderBean, TradeOrderBean, TimeWindow>() {
                @Override
                public void apply(TimeWindow window, Iterable<TradeOrderBean> values, Collector<TradeOrderBean> out) throws Exception {
                    // stamp window bounds and emission time onto the (single) aggregated bean
                    for (TradeOrderBean bean : values) {
                        bean.setStt(DateFormatUtil.toYmdHms(window.getStart()));
                        bean.setEdt(DateFormatUtil.toYmdHms(window.getEnd()));
                        bean.setTs(System.currentTimeMillis());
                        out.collect(bean);
                    }
                }
            }
        );

        //TODO 10. Sink the aggregated result to clickhouse
        aggStream.print(">>");
        aggStream.addSink(MyClickHouseUtil.getSinkFunction("insert into dws_trade_order_window values(?,?,?,?,?)"));

        execEnv.execute();
    }
}
