package yuekao9.dws;

import yuekao9.util.KafkaUtil;
import yuekao9.entity.BaseDic;
import yuekao9.entity.OrderInfo;
import yuekao9.util.AsyncIOUtil;
import com.alibaba.fastjson.JSON;
import yuekao9.entity.OrderCargo;
import yuekao9.entity.BaseRegionInfo;
import org.apache.flink.util.Collector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.functions.co.ProcessJoinFunction;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.time.Duration;
import java.util.TimeZone;
import java.util.concurrent.TimeUnit;

public class CountKafka {

    /** Timestamp pattern emitted by the upstream CDC payload; 'T' and 'Z' are literal characters. */
    private static final String CREATE_TIME_PATTERN = "yyyy-MM-dd'T'HH:mm:ss'Z'";

    /**
     * Parses a {@code create_time} string into epoch milliseconds, interpreting it as GMT+8.
     *
     * <p>A new {@link SimpleDateFormat} is created per call because the class is not thread-safe.
     *
     * <p>BUGFIX: the original code used {@code TimeZone.getTimeZone("UTC+8")}. "UTC+8" is not a
     * valid TimeZone ID, and per the JDK contract {@code getTimeZone} silently returns GMT for
     * unrecognized IDs — shifting every parsed timestamp by 8 hours. "GMT+8" is the correct
     * custom-offset ID.
     *
     * @param createTime timestamp string matching {@link #CREATE_TIME_PATTERN}
     * @return epoch milliseconds of the parsed instant
     * @throws ParseException if the string does not match the expected pattern
     */
    private static long parseCreateTime(String createTime) throws ParseException {
        SimpleDateFormat sdf = new SimpleDateFormat(CREATE_TIME_PATTERN);
        sdf.setTimeZone(TimeZone.getTimeZone("GMT+8"));
        return sdf.parse(createTime).getTime();
    }

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Raw CDC stream: each record is a JSON envelope with "table" and "data" fields.
        DataStreamSource<String> streamSource = env.addSource(KafkaUtil.kafkaSource("tms_ods"));

        // order_info records: parse payload, derive the day partition and the event-time ts.
        SingleOutputStreamOperator<OrderInfo> orderInfostartdata = streamSource
                .filter(x -> "order_info".equals(JSON.parseObject(x).getString("table")))
                .map(new MapFunction<String, OrderInfo>() {
                    @Override
                    public OrderInfo map(String s) throws Exception {
                        OrderInfo data = JSON.parseObject(JSON.parseObject(s).getString("data"), OrderInfo.class);
                        long time = parseCreateTime(data.getCreate_time());
                        // First 10 chars of "yyyy-MM-dd'T'..." are the calendar date.
                        data.setCreate_time_day(data.getCreate_time().substring(0, 10));
                        data.setTs(time);
                        return data;
                    }
                })
                .assignTimestampsAndWatermarks(WatermarkStrategy
                        .<OrderInfo>forBoundedOutOfOrderness(Duration.ofSeconds(0))
                        // BUGFIX: reuse the ts computed above instead of re-parsing with a
                        // SimpleDateFormat that had no timezone set (JVM default) — the two code
                        // paths could produce different event timestamps for the same record.
                        .withTimestampAssigner((event, timestamp) -> event.getTs()));

        // order_cargo records: same envelope, joined to order_info below via order_id.
        SingleOutputStreamOperator<OrderCargo> orderCargodata = streamSource
                .filter(x -> "order_cargo".equals(JSON.parseObject(x).getString("table")))
                .map(new MapFunction<String, OrderCargo>() {
                    @Override
                    public OrderCargo map(String s) throws Exception {
                        OrderCargo data = JSON.parseObject(JSON.parseObject(s).getString("data"), OrderCargo.class);
                        data.setTs(parseCreateTime(data.getCreate_time()));
                        return data;
                    }
                })
                .assignTimestampsAndWatermarks(WatermarkStrategy
                        .<OrderCargo>forBoundedOutOfOrderness(Duration.ofSeconds(0))
                        .withTimestampAssigner((event, timestamp) -> event.getTs()));

        // Interval-join order_info with its cargo rows on order id; the +/-30 ms window assumes
        // both sides carry the same create_time — NOTE(review): confirm this tolerance upstream.
        SingleOutputStreamOperator<Tuple2<OrderInfo, OrderCargo>> orderjoin = orderInfostartdata
                .keyBy(x -> x.getId())
                .intervalJoin(orderCargodata.keyBy(x -> x.getOrder_id()))
                .between(Time.milliseconds(-30), Time.milliseconds(30))
                .process(new ProcessJoinFunction<OrderInfo, OrderCargo, Tuple2<OrderInfo, OrderCargo>>() {
                    @Override
                    public void processElement(OrderInfo orderInfo, OrderCargo orderCargo, ProcessJoinFunction<OrderInfo, OrderCargo, Tuple2<OrderInfo, OrderCargo>>.Context context, Collector<Tuple2<OrderInfo, OrderCargo>> collector) throws Exception {
                        collector.collect(new Tuple2<>(orderInfo, orderCargo));
                    }
                });

        // 1. Count, per cargo type, the number of orders and the order amount for the current day.

        // Async dimension enrichment (region + dictionary lookups), 1 s timeout, capacity 100.
        SingleOutputStreamOperator<Tuple4<OrderInfo, OrderCargo, BaseRegionInfo, BaseDic>> tuple4 =
                AsyncDataStream.unorderedWait(orderjoin, new AsyncIOUtil(), 1000, TimeUnit.MILLISECONDS, 100);
        tuple4.print();

        // Serialize the widened record and publish to the DWS topic.
        SingleOutputStreamOperator<String> map = tuple4.map(x -> JSON.toJSONString(x));
        map.addSink(KafkaUtil.kafkaSink("dws_widedata"));

        env.execute();
    }
}
