package cn.doitedu.transfer;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import utils.TransferUtil;

import java.util.Properties;

/**
 * Reads bin-log change records (canal JSON format) from Kafka, keeps INSERT
 * events for one target table, and writes the transformed rows to a fixed topic.
 * Because the sink topic is hard-coded, a separate job instance is required per
 * table; this could be reworked with side outputs to fan out multiple tables.
 */
public class CouponInfoTransferData {

    /** Delimiter placed between the table name and the transformed row payload. */
    private static final String SEPARATOR = "==";

    /** Name of the source table to accept AND the Kafka topic written to. */
    private static final String TARGET_TOPIC = "hongbao_info";

    /**
     * Entry point: builds and runs the Flink pipeline
     * Kafka(test1) -> parse/transform -> filter empty payloads -> Kafka(hongbao_info).
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Checkpoint every 5s so the Kafka source can restore offsets on recovery.
        env.enableCheckpointing(5000);

        Properties props = new Properties();
        props.setProperty("bootstrap.servers", "h1:9092,h2:9092,h3:9092");
        props.setProperty("group.id", "doitedu");

        FlinkKafkaConsumer<String> consumer =
                new FlinkKafkaConsumer<>("test1", new SimpleStringSchema(), props);
        DataStreamSource<String> ds = env.addSource(consumer);

        SingleOutputStreamOperator<String> mapped = ds.map(new MapFunction<String, String>() {
            @Override
            public String map(String s) throws Exception {
                JSONObject jsonObject = JSONObject.parseObject(s);
                // Only INSERT events on the target table are transformed here.
                String type = jsonObject.getString("type");
                String table = jsonObject.getString("table");
                JSONArray array = jsonObject.getJSONArray("data");

                // Default: table name with an empty payload; dropped by the filter below.
                String res = table + SEPARATOR;
                // BUGFIX: guard against a null/empty "data" array (e.g. DDL events) —
                // the original dereferenced array.getJSONObject(0) unconditionally.
                if ("INSERT".equals(type) && TARGET_TOPIC.equals(table)
                        && array != null && !array.isEmpty()) {
                    JSONObject data = array.getJSONObject(0);
                    res = table + SEPARATOR + TransferUtil.transferData(table, data);
                }
                return res;
            }
        });

        SingleOutputStreamOperator<String> filtered = mapped.filter(new FilterFunction<String>() {
            @Override
            public boolean filter(String s) throws Exception {
                // BUGFIX: limit -1 keeps trailing empty fields, so split[1] exists even
                // for "table==" records; the original split(..) threw
                // ArrayIndexOutOfBoundsException on every non-matching event.
                String[] parts = s.split(SEPARATOR, -1);
                return parts.length > 1 && StringUtils.isNotBlank(parts[1]);
            }
        });

        FlinkKafkaProducer<String> producer = new FlinkKafkaProducer<>(
                "h1:9092,h2:9092,h3:9092", TARGET_TOPIC, new SimpleStringSchema());
        producer.setWriteTimestampToKafka(true);
        // BUGFIX: sink the FILTERED stream. The original attached the sink to `map`,
        // silently discarding the filter stage and emitting empty "table==" records
        // to the target topic for every non-matching event.
        filtered.addSink(producer);

        env.execute("TransferData");
    }
}
