package com.bw.ads;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.ververica.cdc.connectors.mysql.MySQLSource;
import com.alibaba.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.alibaba.ververica.cdc.debezium.DebeziumSourceFunction;
import com.bw.bean.Mn5TM4Wide3;
import com.bw.utils.JsonDeserializationSchemaUtil;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;

import java.util.Properties;
import java.util.concurrent.TimeUnit;

/**
 * Flink job: streams change events for {@code tms01.order_info} from MySQL (via CDC),
 * drops events with an empty {@code data} payload, maps each order into a
 * {@link Mn5TM4Wide3} wide record with per-status counters, enriches it with an
 * async operator, prints it, and publishes the JSON form to Kafka topic
 * {@code mn5tm4topic3}.
 */
public class Test7 {

    // Order status codes carried by order_info.status:
    private static final String STATUS_COLLECTED   = "60030"; // 揽收 (collected)
    private static final String STATUS_IN_TRANSIT  = "60050"; // 发单/运输 (dispatched / in transit)
    private static final String STATUS_TRANSFERRED = "60060"; // 转运完成/中转 (transfer complete)
    private static final String STATUS_DELIVERED   = "60070"; // 派送成功 (delivered)

    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // MySQL CDC source: take an initial snapshot of the table, then tail the binlog.
        DebeziumSourceFunction<String> source = MySQLSource.<String>builder()
                .hostname("hadoop102")
                .port(3306)
                .databaseList("tms01")
                .tableList("tms01.order_info")
                .username("root")
                .password("123456")
                .startupOptions(StartupOptions.initial())
                .deserializer(new JsonDeserializationSchemaUtil())
                .build();

        DataStreamSource<String> stream = env.addSource(source);

        // Keep only events whose "data" payload is a non-empty JSON object.
        // Parse each record ONCE (the original parsed twice — once per predicate clause).
        SingleOutputStreamOperator<String> filterDS = stream.filter(x -> {
            JSONObject data = JSON.parseObject(x).getJSONObject("data");
            return data != null && data.size() > 0;
        });

        SingleOutputStreamOperator<Mn5TM4Wide3> mapDS = filterDS.map(new MapFunction<String, Mn5TM4Wide3>() {
            @Override
            public Mn5TM4Wide3 map(String s) throws Exception {
                String data = JSON.parseObject(s).getString("data");
                Mn5TM4Wide3 mn5TM4Wide = JSON.parseObject(data, Mn5TM4Wide3.class);
                // Normalize ISO-8601 style "yyyy-MM-ddTHH:mm:ssZ" to "yyyy-MM-dd HH:mm:ss".
                mn5TM4Wide.setCreate_time(mn5TM4Wide.getCreate_time().replace("T", " ").replace("Z", ""));
                // Every record counts toward the overall total.
                mn5TM4Wide.setZong(1);
                // Set at most one per-status counter. Null-safe: the original called
                // getStatus().equals(...) and would NPE on a null status; here a
                // missing/unknown status simply leaves all counters at their defaults.
                String status = mn5TM4Wide.getStatus();
                if (status != null) {
                    switch (status) {
                        case STATUS_COLLECTED:   mn5TM4Wide.setLs(1); break; // 揽收
                        case STATUS_IN_TRANSIT:  mn5TM4Wide.setYs(1); break; // 运输
                        case STATUS_TRANSFERRED: mn5TM4Wide.setZz(1); break; // 中转
                        case STATUS_DELIVERED:   mn5TM4Wide.setPs(1); break; // 派送成功
                        default: break; // other statuses have no dedicated counter
                    }
                }
                return mn5TM4Wide;
            }
        });

        // Async enrichment (unordered): 1s timeout per element, up to 100 in flight.
        SingleOutputStreamOperator<Mn5TM4Wide3> asyncDS =
                AsyncDataStream.unorderedWait(mapDS, new AsyncIOFunc4_3(), 1000, TimeUnit.MILLISECONDS, 100);

        asyncDS.print(); // debug output; replace with a logger for production use

        // NOTE(review): no checkpointing/semantic mode configured, so delivery to Kafka
        // is best-effort with producer defaults — confirm against the job's requirements.
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "hadoop102:9092");
        FlinkKafkaProducer<String> myProducer = new FlinkKafkaProducer<>(
                "mn5tm4topic3",
                new SimpleStringSchema(),
                properties);

        // Serialize the enriched wide record to JSON and publish it.
        asyncDS.map(JSON::toJSONString).addSink(myProducer);

        env.execute();
    }
}
