package com.bw.yk01;

import com.alibaba.fastjson.JSON;

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.ProcessJoinFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.util.Collector;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.time.Duration;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Properties;
import java.util.concurrent.TimeUnit;

// Pipeline: jar -> mysql -> topic-db -> kafka/redis -> this class -> dwd_trade_orders
// Reads order_info and order_detail from Kafka and correlates them with an interval join,
// then enriches the joined record via async I/O (sku_info / user_info / base_province),
// finally writing the widened result to the Kafka topic dwd_trade_orders.
public class FlinkTM3 {

    // Shared "yyyy-MM-dd HH:mm:ss" parser. DateTimeFormatter is immutable and
    // thread-safe, unlike the SimpleDateFormat previously created per event.
    private static final DateTimeFormatter CREATE_TIME_FORMAT =
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

    /**
     * Parses a "yyyy-MM-dd HH:mm:ss" create_time string into epoch milliseconds.
     * Uses the JVM default time zone, matching the behavior of the previous
     * SimpleDateFormat-based parsing.
     *
     * @param createTime timestamp string, e.g. "2023-01-01 12:00:00"
     * @return epoch milliseconds of the given local timestamp
     * @throws java.time.format.DateTimeParseException if the string is malformed
     */
    private static long toEpochMillis(String createTime) {
        return LocalDateTime.parse(createTime, CREATE_TIME_FORMAT)
                .atZone(ZoneId.systemDefault())
                .toInstant()
                .toEpochMilli();
    }

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // 1) Consume the ODS-layer business data from Kafka (order_info and order_detail),
        //    assign the event-time field from create_time, and allow at most
        //    2 seconds of out-of-orderness for the watermark.
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "hadoop-single:9092");
        properties.setProperty("group.id", "group1");
        DataStream<String> stream1 = env.addSource(new FlinkKafkaConsumer<>("order_info", new SimpleStringSchema(), properties));
        DataStream<String> stream2 = env.addSource(new FlinkKafkaConsumer<>("order_detail", new SimpleStringSchema(), properties));

        SingleOutputStreamOperator<OrderInfo> oiDS = stream1
                .map(x -> JSON.parseObject(x, OrderInfo.class))
                .assignTimestampsAndWatermarks(WatermarkStrategy
                        .<OrderInfo>forBoundedOutOfOrderness(Duration.ofSeconds(2))
                        // Event time comes from the order's create_time column.
                        .withTimestampAssigner((event, timestamp) -> toEpochMillis(event.getCreate_time()))
                );
        SingleOutputStreamOperator<OrderDetail> odDS = stream2
                .map(x -> JSON.parseObject(x, OrderDetail.class))
                .assignTimestampsAndWatermarks(WatermarkStrategy
                        .<OrderDetail>forBoundedOutOfOrderness(Duration.ofSeconds(2))
                        .withTimestampAssigner((event, timestamp) -> toEpochMillis(event.getCreate_time()))
                );

        // 2) Interval join: match each order_info with order_detail records whose
        //    event time lies within [-5s, +5s] of it (a 10-second total window).
        //    NOTE(review): the requirement says "interval of 10 seconds" — if that
        //    means +/- 10 seconds rather than a 10-second total span, change the
        //    bounds to Time.seconds(-10) / Time.seconds(10). Confirm with the spec.
        SingleOutputStreamOperator<OrderWide> joinDS = oiDS.keyBy(x -> x.getId())
                .intervalJoin(odDS.keyBy(y -> y.getOrder_id()))
                .between(Time.seconds(-5), Time.seconds(5))
                .process(new ProcessJoinFunction<OrderInfo, OrderDetail, OrderWide>() {
                    @Override
                    public void processElement(OrderInfo orderInfo, OrderDetail orderDetail, ProcessJoinFunction<OrderInfo, OrderDetail, OrderWide>.Context context, Collector<OrderWide> collector) throws Exception {
                        // Dimension fields (sku/user/province) are left null here;
                        // they are filled in by the async enrichment stage below.
                        collector.collect(new OrderWide(orderInfo, orderDetail, null, null, null, toEpochMillis(orderInfo.getCreate_time())));
                    }
                });

        // 3) Asynchronously enrich with Redis dimension tables (timeout 1s,
        //    at most 100 in-flight requests), then sink to Kafka: dwd_trade_orders.
        DataStream<OrderWide> resultStream = AsyncDataStream.unorderedWait(joinDS, new AsyncIOFunc(), 1000, TimeUnit.MILLISECONDS, 100);
        resultStream.print();

        FlinkKafkaProducer<String> myProducer = new FlinkKafkaProducer<>(
                "dwd_trade_orders",          // target topic
                new SimpleStringSchema(),    // serialization schema
                properties);

        resultStream.map(x -> JSON.toJSONString(x)).addSink(myProducer);

        env.execute();
    }
}
