package com.zx.learn.flink.utils;

import com.alibaba.fastjson.JSON;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * @desc: Produces data into the "test-topic" Kafka topic by filtering
 *        binlog records for the fmys_goods_lib table and re-emitting them as JSON.
 */
public class KafkaProduce {

    public static void main(String[] args) throws Exception {

        // Job name; also used as (among other things) the Kafka consumer group id.
        String applicationName = "KafkaProduce";

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
        env.setParallelism(1);
        // Checkpoint every 3 seconds into RocksDB; restart strategy comes from project defaults.
        ModelUtil.deployRocksdbCheckpoint(env, applicationName, 3 * 1000);
        ModelUtil.deployRestartStrategy(env);

        env
                // Consume Avro-encoded MySQL binlog records.
                .addSource(KafkaUtil.getKafkaConsumerAvro("bigdata_mysql_binlog_avro", applicationName, CommonRow.class))
                // Keep only changes for the fmys_goods_lib table.
                .filter(new FilterFunction<CommonRow>() {
                    @Override
                    public boolean filter(CommonRow commonRow) throws Exception {
                        return "fmys_goods_lib".equals(commonRow.getTb());
                    }
                })
                // Serialize each record to a JSON string. A single toJSONString call is
                // sufficient; the previous parse/re-serialize round-trip produced the
                // same output while doing three passes over the data.
                .map(new MapFunction<CommonRow, String>() {
                    @Override
                    public String map(CommonRow value) throws Exception {
                        String s = JSON.toJSONString(value);
                        // Debug visibility for this demo job; replace with a logger in production.
                        System.out.println(s);
                        return s;
                    }
                })
                // Write the JSON strings to test-topic with exactly-once semantics.
                .addSink(KafkaUtil.getKafkaProducerAvroForExactlyOnce("test-topic", String.class))
        ;

        env.execute(applicationName);
    }
}
