package cn.kgc.gmall.app.dwm;

import cn.kgc.gmall.app.func.DimAsyncFunction;
import cn.kgc.gmall.bean.OrderDetail;
import cn.kgc.gmall.bean.OrderInfo;
import cn.kgc.gmall.bean.OrderWide;
import cn.kgc.gmall.utils.MyKafkaUtils;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.ProcessJoinFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.Collector;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.concurrent.TimeUnit;

/**
 * OrderWideApp: builds the DWM-layer order wide table.
 *
 * <p>Interval-joins the {@code dwd_order_info} (order header) and
 * {@code dwd_order_detail} (order line-item) Kafka streams into
 * {@link OrderWide} records, then enriches them with dimension data
 * (user, province, SKU, SPU, category3, trademark) via asynchronous
 * HBase/Phoenix lookups, and finally writes the result to the
 * {@code dwm_order_wide} Kafka topic.
 *
 * <p>The two fact streams may arrive slightly out of sync, so event-time
 * watermarks plus an interval join with a &plusmn;5 second bound are used
 * to pair them up.
 */
public class OrderWideApp {
    public static void main(String[] args) throws Exception {
        // 1. Execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);

        // 2. Checkpointing: exactly-once every 5s with a 60s timeout; retain
        //    externalized checkpoints on cancellation so the job can be
        //    restarted from them manually.
        env.enableCheckpointing(5 * 1000L, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(60000L);
        env.getCheckpointConfig()
                .enableExternalizedCheckpoints(
                        CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION
                );
        env.setStateBackend(new FsStateBackend("hdfs://hadoop102:8020/gmall/flink/checkpoint"));
        // HDFS user the checkpoint files are written as.
        System.setProperty("HADOOP_USER_NAME", "atkgc");

        // Kafka source topics and the shared consumer group.
        String orderInfoSourceTopic = "dwd_order_info";     // order header facts
        String orderDetailSourceTopic = "dwd_order_detail"; // order line-item facts
        String groupId = "order_wide_group";

        // 3. Read both fact streams from Kafka.
        DataStreamSource<String> orderInfoDS = env
                .addSource(MyKafkaUtils.getKafkaConsumerSource(orderInfoSourceTopic, groupId));
        DataStreamSource<String> orderDetailDS = env
                .addSource(MyKafkaUtils.getKafkaConsumerSource(orderDetailSourceTopic, groupId));

        // 4. Parse order-header JSON into OrderInfo and stamp create_ts for
        //    event-time assignment. The SimpleDateFormat is created per task
        //    instance in open(), so it is never shared across threads.
        SingleOutputStreamOperator<OrderInfo> orderInfoStream = orderInfoDS.map(new RichMapFunction<String, OrderInfo>() {
            SimpleDateFormat format = null;

            @Override
            public void open(Configuration parameters) throws Exception {
                super.open(parameters);
                // HH (0-23), not hh (1-12): create_time is a 24-hour timestamp
                // such as "2022-08-30 13:22:03"; hh would misparse afternoon hours.
                format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
            }

            @Override
            public OrderInfo map(String value) throws Exception {
                OrderInfo orderInfo = JSON.parseObject(value, OrderInfo.class);
                // Epoch millis of create_time, used later as the event timestamp.
                orderInfo.setCreate_ts(format.parse(orderInfo.getCreate_time()).getTime());
                return orderInfo;
            }
        });

        // 5. Same parsing/timestamping for the order-detail stream.
        SingleOutputStreamOperator<OrderDetail> orderDetailStream = orderDetailDS.map(new RichMapFunction<String, OrderDetail>() {
            SimpleDateFormat format = null;

            @Override
            public void open(Configuration parameters) throws Exception {
                super.open(parameters);
                // 24-hour pattern (HH) — see the order-info mapper above.
                format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
            }

            @Override
            public OrderDetail map(String value) throws Exception {
                OrderDetail orderDetail = JSON.parseObject(value, OrderDetail.class);
                orderDetail.setCreate_ts(format.parse(orderDetail.getCreate_time()).getTime());
                return orderDetail;
            }
        });

        // 6. Event-time watermarks on the order-header stream. Timestamps are
        //    assumed monotonically increasing within a partition.
        SingleOutputStreamOperator<OrderInfo> orderInfoWatermarkStream = orderInfoStream.assignTimestampsAndWatermarks(
                WatermarkStrategy
                        .<OrderInfo>forMonotonousTimestamps()
                        .withTimestampAssigner(
                                new SerializableTimestampAssigner<OrderInfo>() {
                                    @Override
                                    public long extractTimestamp(OrderInfo element, long recordTimestamp) {
                                        return element.getCreate_ts();
                                    }
                                }
                        )
        );

        // Event-time watermarks on the order-detail stream.
        SingleOutputStreamOperator<OrderDetail> orderDetailWatermarkStream = orderDetailStream.assignTimestampsAndWatermarks(
                WatermarkStrategy
                        .<OrderDetail>forMonotonousTimestamps()
                        .withTimestampAssigner(
                                new SerializableTimestampAssigner<OrderDetail>() {
                                    @Override
                                    public long extractTimestamp(OrderDetail element, long recordTimestamp) {
                                        return element.getCreate_ts();
                                    }
                                }
                        )
        );

        // 7. Key both streams by the join key: order_info.id == order_detail.order_id.
        KeyedStream<OrderInfo, Long> orderInfoKeyedStream = orderInfoWatermarkStream.keyBy(OrderInfo::getId);
        KeyedStream<OrderDetail, Long> orderDetailKeyedStream = orderDetailWatermarkStream.keyBy(OrderDetail::getOrder_id);

        // 8. Interval join: pair header and detail records whose event times
        //    are within 5 seconds of each other.
        SingleOutputStreamOperator<OrderWide> orderWideDS = orderInfoKeyedStream
                .intervalJoin(orderDetailKeyedStream)
                .between(Time.seconds(-5L), Time.seconds(5L))
                .process(new ProcessJoinFunction<OrderInfo, OrderDetail, OrderWide>() {
                    @Override
                    public void processElement(OrderInfo left, OrderDetail right, ProcessJoinFunction<OrderInfo, OrderDetail, OrderWide>.Context ctx, Collector<OrderWide> out) throws Exception {
                        out.collect(new OrderWide(left, right));
                    }
                });

        // Debug output of the joined (pre-enrichment) stream.
        orderWideDS.print();

        // 9. Dimension enrichment via async lookups. Each stage has a 60s
        //    timeout; unorderedWait is fine because downstream consumers do
        //    not depend on record order.

        // 9a. User dimension: gender and age (derived from birthday).
        SingleOutputStreamOperator<OrderWide> orderWideUserStream = AsyncDataStream.unorderedWait(
                orderWideDS,
                new DimAsyncFunction<OrderWide>("DIM_USER_INFO") {

                    @Override
                    protected void join(OrderWide orderWide, JSONObject dimInfo) {
                        String gender = dimInfo.getString("GENDER");
                        String birthday = dimInfo.getString("BIRTHDAY");

                        // Age = (now - birthday); approximated with 365-day
                        // years, so it can be off by a day around birthdays.
                        long ageTimeMillis;
                        try {
                            ageTimeMillis = System.currentTimeMillis()
                                    - new SimpleDateFormat("yyyy-MM-dd").parse(birthday).getTime();
                        } catch (ParseException e) {
                            throw new RuntimeException("Unparseable BIRTHDAY in DIM_USER_INFO: " + birthday, e);
                        }
                        long age = ageTimeMillis / 1000L / 60L / 60L / 24L / 365L;

                        orderWide.setUser_gender(gender);
                        orderWide.setUser_age((int) age);
                    }

                    @Override
                    protected String getKey(OrderWide orderWide) {
                        return orderWide.getUser_id().toString();
                    }
                },
                60,
                TimeUnit.SECONDS
        );

        // Smoke test recipe:
        //   start maxwell, kafka, hadoop, hbase, zk, BaseDBApp, OrderWideApp, redis, then:
        //   maxwell-bootstrap --user maxwell --password 123456 --host hadoop102 --database gmall_2022 --table user_info --client_id maxwell_1

        // 9b. Province dimension.
        SingleOutputStreamOperator<OrderWide> orderWideAreaStream = AsyncDataStream.unorderedWait(
                orderWideUserStream,
                new DimAsyncFunction<OrderWide>("DIM_BASE_PROVINCE") {

                    @Override
                    protected void join(OrderWide orderWide, JSONObject dimInfo) {
                        orderWide.setProvince_name(dimInfo.getString("NAME"));
                        orderWide.setProvince_area_code(dimInfo.getString("AREA_CODE"));
                        orderWide.setProvince_iso_code(dimInfo.getString("ISO_CODE"));
                        orderWide.setProvince_3166_2_code(dimInfo.getString("ISO_3166_2"));
                    }

                    @Override
                    protected String getKey(OrderWide orderWide) {
                        return orderWide.getProvince_id().toString();
                    }
                },
                60,
                TimeUnit.SECONDS
        );

        // 9c. SKU dimension. Also fills category3/spu/tm ids, which the later
        //     stages use as their lookup keys — so this stage must run first.
        SingleOutputStreamOperator<OrderWide> orderWideSKUStream = AsyncDataStream.unorderedWait(
                orderWideAreaStream,
                new DimAsyncFunction<OrderWide>("DIM_SKU_INFO") {

                    @Override
                    protected void join(OrderWide orderWide, JSONObject dimInfo) {
                        orderWide.setSku_name(dimInfo.getString("SKU_NAME"));
                        orderWide.setCategory3_id(dimInfo.getLong("CATEGORY3_ID"));
                        orderWide.setSpu_id(dimInfo.getLong("SPU_ID"));
                        orderWide.setTm_id(dimInfo.getLong("TM_ID"));
                    }

                    @Override
                    protected String getKey(OrderWide orderWide) {
                        return orderWide.getSku_id().toString();
                    }
                },
                60,
                TimeUnit.SECONDS
        );

        // 9d. SPU dimension (keyed by spu_id filled in by the SKU stage).
        SingleOutputStreamOperator<OrderWide> orderWideSPUStream = AsyncDataStream.unorderedWait(
                orderWideSKUStream,
                new DimAsyncFunction<OrderWide>("DIM_SPU_INFO") {

                    @Override
                    protected void join(OrderWide orderWide, JSONObject dimInfo) {
                        orderWide.setSpu_name(dimInfo.getString("SPU_NAME"));
                    }

                    @Override
                    protected String getKey(OrderWide orderWide) {
                        return orderWide.getSpu_id().toString();
                    }
                },
                60,
                TimeUnit.SECONDS
        );

        // 9e. Category (level 3) dimension.
        SingleOutputStreamOperator<OrderWide> orderWideCategoryStream = AsyncDataStream.unorderedWait(
                orderWideSPUStream,
                new DimAsyncFunction<OrderWide>("DIM_BASE_CATEGORY3") {

                    @Override
                    protected void join(OrderWide orderWide, JSONObject dimInfo) {
                        orderWide.setCategory3_name(dimInfo.getString("NAME"));
                    }

                    @Override
                    protected String getKey(OrderWide orderWide) {
                        return orderWide.getCategory3_id().toString();
                    }
                },
                60,
                TimeUnit.SECONDS
        );

        // 9f. Trademark dimension. Timeout normalized to 60s like every other
        //     stage (was 600 — almost certainly a typo).
        SingleOutputStreamOperator<OrderWide> orderWideAllStream = AsyncDataStream.unorderedWait(
                orderWideCategoryStream,
                new DimAsyncFunction<OrderWide>("DIM_BASE_TRADEMARK") {

                    @Override
                    protected void join(OrderWide orderWide, JSONObject dimInfo) {
                        orderWide.setTm_name(dimInfo.getString("TM_NAME"));
                    }

                    @Override
                    protected String getKey(OrderWide orderWide) {
                        return orderWide.getTm_id().toString();
                    }
                },
                60,
                TimeUnit.SECONDS
        );

        // 10. Serialize the fully-enriched wide records and sink to Kafka.
        String topic = "dwm_order_wide";
        orderWideAllStream
                .map(JSON::toJSONString)
                .addSink(MyKafkaUtils.getKafkaProducerSink(topic));

        env.execute();
    }
}
