package yuekao8.dws;

import com.alibaba.fastjson.JSON;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.api.java.tuple.Tuple6;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.co.ProcessJoinFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import yuekao8.entity.*;
import yuekao8.util.AsyncIOUtil;
import yuekao8.util.KafkaUtil;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.time.Duration;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.concurrent.TimeUnit;

/**
 * Trade-domain order wide-table job.
 *
 * Consumes raw business change records from the Kafka topic {@code ods_base_topic},
 * splits out {@code order_info} and {@code order_detail} rows, interval-joins them,
 * asynchronously enriches the joined stream with HBase dimension data, and writes
 * the widened records (as JSON) to the Kafka topic {@code dws_trade_orders}.
 */
public class WideData {

    /**
     * Timestamp pattern of the business {@code create_time} field.
     * {@link DateTimeFormatter} is thread-safe, so a single cached instance can be
     * shared by all events (unlike SimpleDateFormat, which would have to be
     * re-created per call).
     */
    private static final DateTimeFormatter CREATE_TIME_FORMAT =
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

    /**
     * Parses a business {@code create_time} string ("yyyy-MM-dd HH:mm:ss") into
     * epoch milliseconds, interpreting it in the system default time zone
     * (matching the previous SimpleDateFormat behavior).
     *
     * @param createTime timestamp string from the source record
     * @return event time in epoch milliseconds
     * @throws java.time.format.DateTimeParseException if the string does not match the pattern
     */
    private static long toEpochMillis(String createTime) {
        return LocalDateTime.parse(createTime, CREATE_TIME_FORMAT)
                .atZone(ZoneId.systemDefault())
                .toInstant()
                .toEpochMilli();
    }

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Trade-domain order fact table: join order_detail with order_info and dimension
        // tables to build an order wide table, then write it to a Kafka topic.
        // 4.1) Consume business data from the DWD layer, filter for order-related records
        //      (order_info and order_detail), assign event time from create_time, and allow
        //      a maximum out-of-orderness of 2 seconds for the watermark.
        DataStreamSource<String> streamSource = env.addSource(KafkaUtil.kafkaSource("ods_base_topic"));

        // order_info stream: filter by source table name, deserialize the "data" payload,
        // and attach event-time watermarks (2 s bounded out-of-orderness).
        SingleOutputStreamOperator<OrderInfo> orderinfodata = streamSource
                .filter(x -> "order_info".equals(JSON.parseObject(x).getString("table")))
                .map(new MapFunction<String, OrderInfo>() {
                    @Override
                    public OrderInfo map(String s) throws Exception {
                        String data = JSON.parseObject(s).getString("data");
                        return JSON.parseObject(data, OrderInfo.class);
                    }
                })
                .assignTimestampsAndWatermarks(WatermarkStrategy
                        .<OrderInfo>forBoundedOutOfOrderness(Duration.ofSeconds(2))
                        .withTimestampAssigner((event, timestamp) -> toEpochMillis(event.getCreate_time())));

        // order_detail stream: same pattern as order_info above.
        SingleOutputStreamOperator<OrderDetail> orderdetaildata = streamSource
                .filter(x -> "order_detail".equals(JSON.parseObject(x).getString("table")))
                .map(new MapFunction<String, OrderDetail>() {
                    @Override
                    public OrderDetail map(String s) throws Exception {
                        String data = JSON.parseObject(s).getString("data");
                        return JSON.parseObject(data, OrderDetail.class);
                    }
                })
                .assignTimestampsAndWatermarks(WatermarkStrategy
                        .<OrderDetail>forBoundedOutOfOrderness(Duration.ofSeconds(2))
                        .withTimestampAssigner((event, timestamp) -> toEpochMillis(event.getCreate_time())));

        // 4.2) Interval-join the order_detail stream with the order_info stream,
        //      keyed on order id, with a time interval of 10 seconds.
        //      (The previous code used Time.milliseconds(5), i.e. a 10 ms window —
        //      off by a factor of 1000 from the stated requirement.)
        SingleOutputStreamOperator<Tuple2<OrderInfo, OrderDetail>> joinorder = orderinfodata
                .keyBy(x -> x.getId())
                .intervalJoin(orderdetaildata.keyBy(x -> x.getOrder_info_id()))
                .between(Time.seconds(-10), Time.seconds(10))
                .process(new ProcessJoinFunction<OrderInfo, OrderDetail, Tuple2<OrderInfo, OrderDetail>>() {
                    @Override
                    public void processElement(OrderInfo orderInfo, OrderDetail orderDetail, ProcessJoinFunction<OrderInfo, OrderDetail, Tuple2<OrderInfo, OrderDetail>>.Context context, Collector<Tuple2<OrderInfo, OrderDetail>> collector) throws Exception {
                        collector.collect(new Tuple2<>(orderInfo, orderDetail));
                    }
                });

        // 4.3) Asynchronously enrich with HBase dimension tables (spu, sku, region, shop)
        //      to widen the records, then sink them to the Kafka topic dws_trade_orders.
        //      Timeout 1000 ms, at most 100 in-flight async requests.
        SingleOutputStreamOperator<Tuple6<OrderInfo, OrderDetail, ProductSpu, ProductSku, Region, Shop>> tuple6 =
                AsyncDataStream.unorderedWait(joinorder, new AsyncIOUtil(), 1000, TimeUnit.MILLISECONDS, 100);

        // Note: the final widened record may be emitted as a JSON string or as plain text;
        // JSON is used here.
        tuple6.map(x -> JSON.toJSONString(x)).addSink(KafkaUtil.kafkaSink("dws_trade_orders"));

        env.execute();
    }
}
