package yuekao3.dwd;

import com.alibaba.fastjson.JSON;

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple5;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.co.ProcessJoinFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import yuekao3.entity.*;
import yuekao3.util.AsyncIOUtil;
import yuekao3.util.KafkaUtil;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.time.Duration;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.concurrent.TimeUnit;

/**
 * DWD-layer Flink job: reads ODS change records from Kafka topic {@code ods_db_data},
 * splits them into order-header and order-detail side outputs, interval-joins the two
 * streams on order id, enriches the joined rows via async HBase lookups, and writes the
 * resulting wide-table rows as JSON to Kafka topic {@code dwd_db_orders}.
 */
public class WideTable {

    /**
     * Pattern of the {@code create_time} field in the source records,
     * e.g. {@code "2023-01-02 03:04:05"}. {@link DateTimeFormatter} is immutable and
     * thread-safe, so a single shared instance is safe — unlike the original code,
     * which allocated a non-thread-safe {@link SimpleDateFormat} for every record.
     */
    private static final DateTimeFormatter EVENT_TIME_FORMAT =
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

    /**
     * Parses a {@code "yyyy-MM-dd HH:mm:ss"} timestamp into epoch milliseconds using the
     * JVM default time zone — the same semantics as the {@code SimpleDateFormat.parse}
     * call this replaces.
     *
     * @param createTime the event-time string from the record's {@code create_time} field
     * @return the timestamp in milliseconds since the epoch
     * @throws java.time.format.DateTimeParseException if the string does not match the
     *         expected pattern (unchecked, like the {@code RuntimeException} the original
     *         threw on a parse failure)
     */
    static long toEpochMillis(String createTime) {
        return LocalDateTime.parse(createTime, EVENT_TIME_FORMAT)
                .atZone(ZoneId.systemDefault())
                .toInstant()
                .toEpochMilli();
    }

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // (DWD layer) Read ODS data from the Kafka queue; event time is taken from each
        // record's create_time via a watermark strategy with zero out-of-orderness allowance.
        DataStreamSource<String> streamSource = env.addSource(KafkaUtil.kafkaSource("ods_db_data"));

        // Anonymous subclasses so the OutputTag's generic type survives erasure.
        OutputTag<String> order_info = new OutputTag<String>("order_info") {
        };
        OutputTag<String> order_detail = new OutputTag<String>("order_detail") {
        };

        // Route each raw record to a side output by its "table" field; records from any
        // other table are dropped (nothing is emitted on the main output either).
        SingleOutputStreamOperator<String> process = streamSource.process(new ProcessFunction<String, String>() {
            @Override
            public void processElement(String s, ProcessFunction<String, String>.Context context, Collector<String> collector) throws Exception {
                String table = JSON.parseObject(s).getString("table");
                if ("order_info".equals(table)) {
                    context.output(order_info, s);
                }
                if ("order_detail".equals(table)) {
                    context.output(order_detail, s);
                }
            }
        });

        // Order headers: deserialize the "data" payload and assign event timestamps.
        // Duration.ofSeconds(0) means watermarks tolerate no out-of-order events.
        SingleOutputStreamOperator<OrderInfo> orderinfodata = process.getSideOutput(order_info).map(new MapFunction<String, OrderInfo>() {
            @Override
            public OrderInfo map(String s) throws Exception {
                String data = JSON.parseObject(s).getString("data");
                return JSON.parseObject(data, OrderInfo.class);
            }
        }).assignTimestampsAndWatermarks(WatermarkStrategy
                .<OrderInfo>forBoundedOutOfOrderness(Duration.ofSeconds(0))
                .withTimestampAssigner((event, timestamp) -> toEpochMillis(event.getCreate_time())));

        // Order details: same deserialization and watermarking as the headers above.
        SingleOutputStreamOperator<OrderDetail> orderdetaildata = process.getSideOutput(order_detail).map(new MapFunction<String, OrderDetail>() {
            @Override
            public OrderDetail map(String s) throws Exception {
                String data = JSON.parseObject(s).getString("data");
                return JSON.parseObject(data, OrderDetail.class);
            }
        }).assignTimestampsAndWatermarks(WatermarkStrategy
                .<OrderDetail>forBoundedOutOfOrderness(Duration.ofSeconds(0))
                .withTimestampAssigner((event, timestamp) -> toEpochMillis(event.getCreate_time())));

        // Interval-join headers with details on order id to build the wide-table stream.
        // NOTE(review): the join window is +/-5 MILLISECONDS — unusually tight for event
        // time; confirm this wasn't meant to be Time.seconds(5).
        SingleOutputStreamOperator<Tuple2<OrderInfo, OrderDetail>> process1 = orderinfodata
                .keyBy(x -> x.getId())
                .intervalJoin(orderdetaildata.keyBy(x -> x.getOrder_id()))
                .between(Time.milliseconds(-5), Time.milliseconds(5))
                .process(new ProcessJoinFunction<OrderInfo, OrderDetail, Tuple2<OrderInfo, OrderDetail>>() {
                    @Override
                    public void processElement(OrderInfo orderInfo, OrderDetail orderDetail, ProcessJoinFunction<OrderInfo, OrderDetail, Tuple2<OrderInfo, OrderDetail>>.Context context, Collector<Tuple2<OrderInfo, OrderDetail>> collector) throws Exception {
                        collector.collect(new Tuple2<>(orderInfo, orderDetail));
                    }
                });

        // (DWD layer) Enrich with dimension data (sku_id, user_id, province_id) via
        // async HBase lookups: 1000 ms timeout, at most 100 in-flight requests,
        // results emitted in completion (unordered) order.
        SingleOutputStreamOperator<Tuple5<OrderInfo, OrderDetail, UserInfo, BaseProvince, SkuInfo>> tuple5 = AsyncDataStream.unorderedWait(process1, new AsyncIOUtil(), 1000, TimeUnit.MILLISECONDS, 100);

        // Serialize the wide rows to JSON and publish to the DWD Kafka topic.
        tuple5.map(x -> JSON.toJSONString(x)).addSink(KafkaUtil.kafkaSink("dwd_db_orders"));
        env.execute();
    }
}
