package com.bw.yk09;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple5;
import org.apache.flink.connector.jdbc.JdbcConnectionOptions;
import org.apache.flink.connector.jdbc.JdbcExecutionOptions;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.connector.jdbc.JdbcStatementBuilder;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.ProcessJoinFunction;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;

import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.time.Duration;
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Properties;
import java.util.TimeZone;
import java.util.concurrent.TimeUnit;

/**
 * Flink streaming job: reads order records ({@code dwd_order_info}) and
 * order-organization bindings ({@code dwd_order_org_bound}) from Kafka,
 * interval-joins them on order id within a 5-second event-time window,
 * asynchronously enriches the joined record, and writes the wide record
 * both back to Kafka ({@code dwm_order_info_bound_wide}) and to the
 * ClickHouse table {@code yk09_order_wide}.
 */
public class test6 {

    /** Zone the source timestamps are expressed in (GMT+8, per the original job). */
    private static final ZoneId SOURCE_ZONE = ZoneId.of("GMT+8");

    /**
     * Shared, thread-safe formatter for "yyyy-MM-dd HH:mm:ss" timestamps.
     * Replaces the per-record SimpleDateFormat instances, which are not
     * thread-safe and were being re-created for every element.
     */
    private static final DateTimeFormatter TS_FORMAT =
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss").withZone(SOURCE_ZONE);

    public static void main(String[] args) throws Exception {
        // Create the streaming environment; parallelism 1 keeps this demo job simple.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "hadoop-single:9092");
        properties.setProperty("group.id", "test6");

        DataStream<String> orderInfoSource = env.addSource(
                new FlinkKafkaConsumer<>("dwd_order_info", new SimpleStringSchema(), properties));
        DataStream<String> orderOrgBoundSource = env.addSource(
                new FlinkKafkaConsumer<>("dwd_order_org_bound", new SimpleStringSchema(), properties));

        // Extract the "data" payload, normalize create_time, and derive the
        // event-time timestamp (create_ts) used for watermarking. The timestamp
        // is parsed ONCE per record (the original parsed it twice).
        SingleOutputStreamOperator<Order_info> orderInfoDS = orderInfoSource
                .map(new MapFunction<String, Order_info>() {
                    @Override
                    public Order_info map(String value) throws Exception {
                        JSONObject data = JSON.parseObject(value).getJSONObject("data");
                        Order_info bean = JSON.parseObject(data.toJSONString(), Order_info.class);
                        long ts = toEpochMillis(bean.getCreate_time());
                        bean.setCreate_time(TS_FORMAT.format(Instant.ofEpochMilli(ts)));
                        bean.setCreate_ts(ts);
                        return bean;
                    }
                })
                // Strictly ascending timestamps assumed (0s out-of-orderness).
                .assignTimestampsAndWatermarks(WatermarkStrategy
                        .<Order_info>forBoundedOutOfOrderness(Duration.ofSeconds(0))
                        .withTimestampAssigner((event, timestamp) -> event.getCreate_ts()));

        SingleOutputStreamOperator<OrderOrgBound> orderOrgBoundDS = orderOrgBoundSource
                .map(new MapFunction<String, OrderOrgBound>() {
                    @Override
                    public OrderOrgBound map(String value) throws Exception {
                        JSONObject data = JSON.parseObject(value).getJSONObject("data");
                        OrderOrgBound bean = JSON.parseObject(data.toJSONString(), OrderOrgBound.class);
                        long ts = toEpochMillis(bean.getCreate_time());
                        bean.setCreate_time(TS_FORMAT.format(Instant.ofEpochMilli(ts)));
                        bean.setCreate_ts(ts);
                        return bean;
                    }
                })
                .assignTimestampsAndWatermarks(WatermarkStrategy
                        .<OrderOrgBound>forBoundedOutOfOrderness(Duration.ofSeconds(0))
                        .withTimestampAssigner((event, timestamp) -> event.getCreate_ts()));

        // Interval join: a bound record may carry a timestamp up to 5 seconds
        // AFTER its matching order record (between -5s and 0s relative to the order).
        SingleOutputStreamOperator<OrderInfoWide> joinDS = orderInfoDS
                .keyBy(Order_info::getId)
                .intervalJoin(orderOrgBoundDS.keyBy(OrderOrgBound::getOrder_id))
                .between(Time.seconds(-5), Time.seconds(0))
                .process(new ProcessJoinFunction<Order_info, OrderOrgBound, OrderInfoWide>() {
                    @Override
                    public void processElement(Order_info left, OrderOrgBound right,
                                               Context ctx, Collector<OrderInfoWide> out) throws Exception {
                        out.collect(new OrderInfoWide(left, right));
                    }
                });

        // Asynchronous dimension enrichment: 1s timeout, at most 100 in-flight requests.
        SingleOutputStreamOperator<OrderInfoWide> asyncDS = AsyncDataStream.unorderedWait(
                joinDS, new AsyncIOFuncykwide(), 1000, TimeUnit.MILLISECONDS, 100);

        // Fan out: the wide record goes to the DWM Kafka topic...
        asyncDS.map(JSON::toJSONString)
                .addSink(MyKafkaUtil.getKafkaProducer("dwm_order_info_bound_wide"));

        // ...and to ClickHouse.
        // FIX: the column list names 8 columns, so the VALUES clause needs 8
        // placeholders — the original had only 7 while binding 8 parameters,
        // which would make every insert fail at execution time.
        asyncDS.addSink(JdbcSink.sink(
                "insert into yk09_order_wide "
                        + "(order_id,order_status,city_name,org_name,bound_status,create_time,update_time,distance) "
                        + "values(?,?,?,?,?,?,?,?)",
                (JdbcStatementBuilder<OrderInfoWide>) (stmt, wide) -> {
                    stmt.setObject(1, wide.getOrder_info().getId());
                    stmt.setObject(2, wide.getOrder_info().getStatus());
                    stmt.setObject(3, wide.getCity_name());
                    stmt.setObject(4, wide.getOrgan_name());
                    stmt.setObject(5, wide.getOrderOrgBound().getStatus());
                    stmt.setObject(6, wide.getOrder_info().getCreate_time());
                    stmt.setObject(7, wide.getOrder_info().getUpdate_time());
                    stmt.setObject(8, wide.getOrder_info().getDistance());
                },
                JdbcExecutionOptions.builder()
                        // FIX: batch size 0 disables size-based flushing, and with no
                        // checkpointing and no batch interval configured the rows would
                        // sit in the buffer indefinitely. Write each record through.
                        .withBatchSize(1)
                        .build(),
                new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
                        .withDriverName("ru.yandex.clickhouse.ClickHouseDriver")
                        .withUrl("jdbc:clickhouse://hadoop-single:8123/default")
                        .build()));

        env.execute();
    }

    /**
     * Parses a "yyyy-MM-dd HH:mm:ss" timestamp interpreted in GMT+8 to epoch
     * milliseconds. Throws an unchecked DateTimeParseException on malformed
     * input (the callers declare {@code throws Exception}, as before).
     *
     * @param createTime timestamp string from the source record
     * @return epoch milliseconds of the instant
     */
    private static long toEpochMillis(String createTime) {
        return LocalDateTime.parse(createTime, TS_FORMAT).atZone(SOURCE_ZONE).toInstant().toEpochMilli();
    }
}
