package com.bw.demo;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.bw.bean.OrderDetail;
import com.bw.bean.OrderInfo;
import com.bw.bean.OrderWide;
import com.bw.utils.MyKafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.ProcessJoinFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;

import java.text.SimpleDateFormat;
import java.time.Duration;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;

/**
 * DWD-layer order-widening job.
 *
 * <p>Reads the order stream ({@code dwd_order_info}) and the order-detail stream
 * ({@code dwd_order_detail}) from Kafka, assigns event time from each record's
 * {@code create_time}, interval-joins the two streams on the order id to build a
 * wide order record ({@link OrderWide}), and writes the widened records back to
 * Kafka as JSON ({@code dwd_order_wide}).
 */
public class Demo22 {

    /**
     * Source records carry {@code create_time} as "yyyy-MM-dd HH:mm:ss".
     * DateTimeFormatter is thread-safe, so a single shared instance is fine
     * (unlike the SimpleDateFormat it replaces).
     */
    private static final DateTimeFormatter CREATE_TIME_FORMAT =
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

    /**
     * Parses a "yyyy-MM-dd HH:mm:ss" timestamp into epoch milliseconds using the
     * system default zone — same result SimpleDateFormat.parse(...).getTime()
     * produced in the original implementation.
     */
    private static long toEpochMillis(String createTime) {
        return LocalDateTime.parse(createTime, CREATE_TIME_FORMAT)
                .atZone(ZoneId.systemDefault())
                .toInstant()
                .toEpochMilli();
    }

    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // Checkpointing is intentionally disabled for this demo run:
        // env.enableCheckpointing(10000);
        // env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
        // env.setStateBackend(new FsStateBackend("hdfs://hadoop102:8020/"));

        String topicInfo = "dwd_order_info";     // order table
        String topicDetail = "dwd_order_detail"; // order-detail table
        String topicSink = "dwd_order_wide";     // widened result, written back to Kafka
        String groupId = "Sxl0112";

        // Consume the order and order-detail topics from the Kafka DWD layer,
        // starting from the earliest offset so historical data is replayed.
        FlinkKafkaConsumer<String> infoConsumer = MyKafkaUtil.getKafkaSource(topicInfo, groupId);
        infoConsumer.setStartFromEarliest();
        DataStreamSource<String> infoJsonStream = env.addSource(infoConsumer);

        FlinkKafkaConsumer<String> detailConsumer = MyKafkaUtil.getKafkaSource(topicDetail, groupId);
        detailConsumer.setStartFromEarliest();
        DataStreamSource<String> detailJsonStream = env.addSource(detailConsumer);

        // Deserialize each JSON record and derive the epoch-millis event
        // timestamp (create_ts) from its create_time string.
        SingleOutputStreamOperator<OrderInfo> orderInfoStream = infoJsonStream.map(
                new RichMapFunction<String, OrderInfo>() {
                    @Override
                    public OrderInfo map(String json) throws Exception {
                        OrderInfo orderInfo = JSONObject.parseObject(json, OrderInfo.class);
                        orderInfo.setCreate_ts(toEpochMillis(orderInfo.getCreate_time()));
                        return orderInfo;
                    }
                }
        );

        SingleOutputStreamOperator<OrderDetail> orderDetailStream = detailJsonStream.map(
                new RichMapFunction<String, OrderDetail>() {
                    @Override
                    public OrderDetail map(String json) throws Exception {
                        OrderDetail orderDetail = JSONObject.parseObject(json, OrderDetail.class);
                        orderDetail.setCreate_ts(toEpochMillis(orderDetail.getCreate_time()));
                        return orderDetail;
                    }
                }
        );

        // Event-time watermarks: records may arrive up to 3 seconds out of order.
        // The explicit cast resolves the withTimestampAssigner overload for lambdas.
        SingleOutputStreamOperator<OrderInfo> orderInfoWithWm =
                orderInfoStream.assignTimestampsAndWatermarks(
                        WatermarkStrategy.<OrderInfo>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                                .withTimestampAssigner(
                                        (SerializableTimestampAssigner<OrderInfo>)
                                                (orderInfo, recordTs) -> orderInfo.getCreate_ts()));

        SingleOutputStreamOperator<OrderDetail> orderDetailWithWm =
                orderDetailStream.assignTimestampsAndWatermarks(
                        WatermarkStrategy.<OrderDetail>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                                .withTimestampAssigner(
                                        (SerializableTimestampAssigner<OrderDetail>)
                                                (orderDetail, recordTs) -> orderDetail.getCreate_ts()));

        // Key both streams by the order id so matching records meet in the join.
        KeyedStream<OrderInfo, Long> orderInfoKeyed = orderInfoWithWm.keyBy(OrderInfo::getId);
        KeyedStream<OrderDetail, Long> orderDetailKeyed = orderDetailWithWm.keyBy(OrderDetail::getOrder_id);

        // Interval-join order with its details to build the wide record, then
        // serialize it ONCE to JSON (the original round-tripped through an
        // intermediate JSONObject for no benefit).
        // NOTE(review): a +/-5 *millisecond* join window is extremely narrow for
        // a 3 second out-of-orderness bound — this was very likely meant to be
        // Time.seconds(5). Kept as-is to preserve behavior; confirm with the
        // data owner before widening.
        SingleOutputStreamOperator<String> orderWideJson = orderInfoKeyed.intervalJoin(orderDetailKeyed)
                .between(Time.milliseconds(-5), Time.milliseconds(5))
                .process(new ProcessJoinFunction<OrderInfo, OrderDetail, String>() {
                    @Override
                    public void processElement(OrderInfo orderInfo, OrderDetail orderDetail,
                                               Context context, Collector<String> out) throws Exception {
                        // Emit the joined record on the main output (not a side output).
                        out.collect(JSON.toJSONString(new OrderWide(orderInfo, orderDetail)));
                    }
                });

        // Debug visibility of the widened stream.
        orderWideJson.print();

        // Write the widened orders back to Kafka (DWM layer).
        orderWideJson.addSink(MyKafkaUtil.getKafkaSink(topicSink));

        env.execute();
    }
}
