package com.bw.app.dwm;

import com.alibaba.fastjson.JSON;
import com.bw.bean.*;
import com.bw.utils.MyKafkaUtil;
import lombok.SneakyThrows;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.ProcessJoinFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.Collector;

import java.text.SimpleDateFormat;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;

public class PaymentWideAppPersona {
    // Data lineage:
    //   app -> nginx -> SpringBoot -> MySQL (business DB) -> FlinkCDC -> Kafka (ODS)
    //   -> Flink (base_db_app) -> Kafka (DWD) / HBase (DIM) -> Flink (OrderWideApp)
    //   -> Kafka (DWM) -> Flink (PaymentWideApp, this job) -> Kafka

    // Shared formatter for "yyyy-MM-dd HH:mm:ss" create_time strings.
    // DateTimeFormatter is immutable and thread-safe, unlike the legacy
    // SimpleDateFormat the job previously re-allocated for every record.
    // Static fields are not serialized with operators; each task JVM
    // re-initializes this constant on class load.
    private static final DateTimeFormatter EVENT_TIME_FORMAT =
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

    /**
     * Parses a "yyyy-MM-dd HH:mm:ss" timestamp string into epoch milliseconds,
     * interpreting it in the JVM default time zone — the same semantics as the
     * previous {@code SimpleDateFormat.parse(...)} based extraction.
     *
     * @param createTime event-time string, e.g. "2021-07-01 12:00:00"
     * @return epoch milliseconds of the given local timestamp
     * @throws java.time.format.DateTimeParseException if the string is malformed
     */
    private static long toEpochMillis(String createTime) {
        return LocalDateTime.parse(createTime, EVENT_TIME_FORMAT)
                .atZone(ZoneId.systemDefault())
                .toInstant()
                .toEpochMilli();
    }

    public static void main(String[] args) throws Exception {
        // 1. Set up the execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // Checkpointing (disabled here; enable and create the HDFS directory for production):
//        env.setStateBackend(new FsStateBackend("hdfs://hadoop102:9820/gmall-flink/ck"));
        // Checkpoint every 5 seconds:
//        env.enableCheckpointing(5000L);
        // Exactly-once checkpointing mode:
//        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
        // Checkpoint timeout:
//        env.getCheckpointConfig().setCheckpointTimeout(100000L);
        // Maximum number of concurrent checkpoints:
//        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        // Minimum pause between the end of one checkpoint and the start of the next:
//        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
        // env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);

        // 2. Consume Kafka topic dwm_order_wide, convert each JSON record to a
        //    JavaBean, and assign event-time watermarks from create_time.
        String paymentInfoSourceTopic = "dwd_payment_info";
        String orderWideSourceTopic = "dwm_order_wide";
        String paymentWideSinkTopic = "dwm_payment_wide";
        String groupId = "payment_wide_app_2105b";
        DataStreamSource<String> kafkaDS = env.addSource(MyKafkaUtil.getKafkaConsumer(orderWideSourceTopic, groupId));
        SingleOutputStreamOperator<OrderWidePersona> orderWideDS = kafkaDS.map(new MapFunction<String, OrderWidePersona>() {
            @Override
            public OrderWidePersona map(String value) throws Exception {
                return JSON.parseObject(value, OrderWidePersona.class);
            }
        }).assignTimestampsAndWatermarks(WatermarkStrategy.<OrderWidePersona>forMonotonousTimestamps().withTimestampAssigner(new SerializableTimestampAssigner<OrderWidePersona>() {
            @Override
            public long extractTimestamp(OrderWidePersona orderWide, long recordTimestamp) {
                // Event time comes from the record's create_time string.
                return toEpochMillis(orderWide.getCreate_time());
            }
        }));

        // 3. Consume Kafka topic dwd_payment_info the same way.
        SingleOutputStreamOperator<PaymentInfo> paymentInfoDS = env.addSource(MyKafkaUtil.getKafkaConsumer(paymentInfoSourceTopic, groupId)).map(new MapFunction<String, PaymentInfo>() {
            @Override
            public PaymentInfo map(String value) throws Exception {
                return JSON.parseObject(value, PaymentInfo.class);
            }
        }).assignTimestampsAndWatermarks(WatermarkStrategy.<PaymentInfo>forMonotonousTimestamps().withTimestampAssigner(new SerializableTimestampAssigner<PaymentInfo>() {
            @Override
            public long extractTimestamp(PaymentInfo paymentInfo, long recordTimestamp) {
                return toEpochMillis(paymentInfo.getCreate_time());
            }
        }));

        // 4. Interval-join the two streams on order_id. The bounds admit an
        //    order-wide record whose event time lies within
        //    [payment time - 15 min, payment time + 5 s]: a payment can follow
        //    its order by up to 15 minutes, and the small positive upper bound
        //    tolerates slight timestamp skew between the two streams.
        SingleOutputStreamOperator<PaymentWidePersona> processDS = paymentInfoDS.keyBy(new KeySelector<PaymentInfo, String>() {
            @Override
            public String getKey(PaymentInfo paymentInfo) throws Exception {
                return String.valueOf(paymentInfo.getOrder_id());
            }
        }).intervalJoin(orderWideDS.keyBy(new KeySelector<OrderWidePersona, String>() {
            @Override
            public String getKey(OrderWidePersona orderWide) throws Exception {
                return String.valueOf(orderWide.getOrder_id());
            }
        }))
                .between(Time.minutes(-15), Time.seconds(5)) // mind the bounds: order in [payment - 15 min, payment + 5 s]
                .process(new ProcessJoinFunction<PaymentInfo, OrderWidePersona, PaymentWidePersona>() {
                    @Override
                    public void processElement(PaymentInfo paymentInfo, OrderWidePersona orderWide, Context context, Collector<PaymentWidePersona> collector) throws Exception {
                        collector.collect(new PaymentWidePersona(paymentInfo, orderWide));
                    }
                });

        // 5. Print for debugging and sink the joined wide records to Kafka (dwm_payment_wide).
        processDS.print();
        processDS.map(JSON::toJSONString).addSink(MyKafkaUtil.getKafkaProducer(paymentWideSinkTopic));

        // 6. Launch the job.
        env.execute("PaymentWideApp");
    }
}
