package com.bw.yk09;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.connector.jdbc.JdbcConnectionOptions;
import org.apache.flink.connector.jdbc.JdbcExecutionOptions;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.connector.jdbc.JdbcStatementBuilder;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.co.ProcessJoinFunction;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.time.Duration;
import java.util.Properties;
import java.util.TimeZone;
import java.util.concurrent.TimeUnit;

/**
 * Flink streaming job: consumes binlog-style JSON change records from the Kafka topic
 * {@code tms_ods}, splits them into {@code order_info} / {@code order_cargo} streams,
 * interval-joins the two into a wide record, aggregates order count and order amount per
 * (cargo_type, creation day) in daily event-time tumbling windows, asynchronously enriches
 * the aggregates via {@link FlinkDIMAsyncIO}, and writes the result to ClickHouse.
 */
public class test5 {
    public static void main(String[] args) throws Exception {
        // Create the streaming environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Run the whole job with a single parallel task.
        env.setParallelism(1);

        DataStreamSource<String> stream = env.addSource(new MyKafkaUtil().getKafkaConsumer("tms_ods","test5_group"));

        // Split the raw stream by source table name. "literal".equals(...) is null-safe:
        // a record without a "table" field is simply filtered out instead of throwing an
        // NPE and failing the job.
        SingleOutputStreamOperator<String> order_cargoDS = stream.filter(x -> "order_cargo".equals(JSON.parseObject(x).getString("table")));
        SingleOutputStreamOperator<String> orderInfoDS = stream.filter(x -> "order_info".equals(JSON.parseObject(x).getString("table")));

        // Map the order_info JSON payload ("data" field) to a POJO, stamp it with the
        // epoch-millis of create_time, and assign event-time watermarks with 10 s of
        // bounded out-of-orderness.
        SingleOutputStreamOperator<Order_info> order_infoDS=orderInfoDS.map(new MapFunction<String, Order_info>() {
            @Override
            public Order_info map(String s) throws Exception {
                Order_info orderInfo=JSON.parseObject(JSON.parseObject(s).getString("data"),Order_info.class);
                // SimpleDateFormat is not thread-safe, so a fresh instance per call is
                // intentional here.
                SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                long ts=sdf.parse(orderInfo.getCreate_time()).getTime();
                orderInfo.setCreate_ts(ts);
                return orderInfo;
            }
        }).assignTimestampsAndWatermarks(WatermarkStrategy.<Order_info>forBoundedOutOfOrderness(Duration.ofSeconds(10))
                                .withTimestampAssigner(new SerializableTimestampAssigner<Order_info>() {
                                        @Override
                                        public long extractTimestamp(Order_info order_info, long l) {
                                            return order_info.getCreate_ts();
                                           }
                                       }
                                ));

        // Same transformation for the order_cargo stream.
        SingleOutputStreamOperator<Order_cargo> dwd_boundSortBeanDS=order_cargoDS.map(new MapFunction<String, Order_cargo>() {
            @Override
            public Order_cargo map(String s) throws Exception {
                Order_cargo order_cargo=JSON.parseObject(JSON.parseObject(s).getString("data"),Order_cargo.class);
                SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                long ts=sdf.parse(order_cargo.getCreate_time()).getTime();
                order_cargo.setCreate_ts(ts);
                return order_cargo;
            }
        }).assignTimestampsAndWatermarks(WatermarkStrategy.<Order_cargo>forBoundedOutOfOrderness(Duration.ofSeconds(10))
                        .withTimestampAssigner(new SerializableTimestampAssigner<Order_cargo>() {
                                                   @Override
                                                   public long extractTimestamp(Order_cargo order_cargo, long l) {
                                                      return order_cargo.getCreate_ts();
                                                   }
                                               }
                        ));

        // Interval-join order_info with order_cargo on order id: a cargo record matches
        // an order whose event time lies within [-5 min, +5 s] of its own.
        SingleOutputStreamOperator<OrderInfoCargoWide> joinDS = order_infoDS.keyBy(Order_info::getId)
                .intervalJoin(dwd_boundSortBeanDS.keyBy(Order_cargo::getOrder_id))
                .between(Time.minutes(-5), Time.seconds(5))
                .process(new ProcessJoinFunction<Order_info, Order_cargo, OrderInfoCargoWide>() {
                    @Override
                    public void processElement(Order_info order_info, Order_cargo order_cargo, Context context, Collector<OrderInfoCargoWide> collector) throws Exception {
                        collector.collect(new OrderInfoCargoWide(order_info, order_cargo));
                    }
                });

        // Project the wide record onto a per-order stats bean (count = 1, amount, day,
        // cargo type) that the window reduce below can sum.
        SingleOutputStreamOperator<Order_info_stats> mapDS2 = joinDS.map(new MapFunction<OrderInfoCargoWide, Order_info_stats>() {
            @Override
            public Order_info_stats map(OrderInfoCargoWide orderInfoCargoWide) throws Exception {
                Order_info_stats order_info_stats=new Order_info_stats();
                // Parse create_time as GMT+8 so the ts field is anchored to the source
                // time zone rather than the JVM default.
                SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                sdf.setTimeZone(TimeZone.getTimeZone("GMT+8"));
                long time = sdf.parse(orderInfoCargoWide.getOrder_info().getCreate_time()).getTime();
                // create_time is "yyyy-MM-dd HH:mm:ss"; the first 10 chars are the day.
                order_info_stats.setCreate_time_by_day(orderInfoCargoWide.getOrder_info().getCreate_time().substring(0,10));
                order_info_stats.setCargo_type(orderInfoCargoWide.getOrder_cargo().getCargo_type());
                order_info_stats.setOrder_order_num(1L);
                order_info_stats.setOrder_amount(orderInfoCargoWide.getOrder_info().getAmount());
                order_info_stats.setTs(time);
                return order_info_stats;
            }
        });

        // Key by (cargo_type, creation day) for the daily aggregation.
        KeyedStream<Order_info_stats, Tuple2<String,String>> keyedStream = mapDS2.keyBy(new KeySelector<Order_info_stats, Tuple2<String,String>>() {
            @Override
            public Tuple2<String,String> getKey(Order_info_stats value) throws Exception {
                return new Tuple2<>(value.getCargo_type(),
                        value.getCreate_time_by_day());
            }
        });

        // The anonymous-subclass braces are required: a plain `new OutputTag<>("late")`
        // loses the element type to erasure and Flink's TypeExtractor throws
        // InvalidTypesException when the job graph is built.
        // NOTE(review): this side output is tagged but never read back via
        // reduceDS.getSideOutput(lateData) — late records are currently dropped; confirm
        // whether they should be re-processed or logged.
        OutputTag<Order_info_stats> lateData=new OutputTag<Order_info_stats>("late") {};
        // Daily event-time tumbling window; incrementally sum amount and order count,
        // then stamp the window start/end onto the aggregate in the WindowFunction.
        // NOTE(review): a 10-second offset on a 1-day window looks odd — for GMT+8 day
        // alignment the usual offset is Time.hours(-8); confirm the intent.
        SingleOutputStreamOperator<Order_info_stats> reduceDS=keyedStream
                .window(TumblingEventTimeWindows.of(Time.days(1),Time.seconds(10)))
                .sideOutputLateData(lateData)
                .reduce(new ReduceFunction<Order_info_stats>() {
                    @Override
                    public Order_info_stats reduce(Order_info_stats stats1, Order_info_stats stats2) throws Exception {
                        stats1.setOrder_amount(stats1.getOrder_amount().add(stats2.getOrder_amount()));
                        stats1.setOrder_order_num(stats1.getOrder_order_num() + stats2.getOrder_order_num());
                        return stats1;
                    }
                }, new WindowFunction<Order_info_stats, Order_info_stats, Tuple2<String, String>, TimeWindow>() {
                    @Override
                    public void apply(Tuple2<String, String> stringStringTuple2, TimeWindow timeWindow, Iterable<Order_info_stats> iterable, Collector<Order_info_stats> collector) throws Exception {
                        // Take the window start and end timestamps.
                        long start = timeWindow.getStart();
                        long end = timeWindow.getEnd();

                        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                        String stt = sdf.format(start);
                        String edt = sdf.format(end);

                        // Stamp the window bounds onto the (single) reduced aggregate.
                        Order_info_stats Order_info_stats = iterable.iterator().next();
                        Order_info_stats.setStt(stt);
                        Order_info_stats.setEdt(edt);

                        // Emit the finished aggregate downstream.
                        collector.collect(Order_info_stats);
                    }
                });

        // Asynchronously enrich each aggregate (dimension lookup), 1 s timeout, at most
        // 100 requests in flight; unordered output is fine after windowing.
        SingleOutputStreamOperator<Order_info_stats> reduceDSbyuser= AsyncDataStream.unorderedWait(reduceDS, new FlinkDIMAsyncIO(), 1000, TimeUnit.MILLISECONDS, 100);

        reduceDSbyuser.print();

        // Sink the enriched aggregates to ClickHouse.
        reduceDSbyuser.addSink(new FlinkDWSSinkClickhouse("insert into yk09sink01 values (?,?,?,?,?,?,?,?,?,?)"));

        env.execute();
    }
}
