package cn.jly.flink.source2sink.kafka;

import cn.jly.flink.entity.Metric;
import cn.jly.flink.utils.ConstantUtils;
import cn.jly.flink.utils.FlinkUtils;
import com.alibaba.fastjson.JSON;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.util.Properties;

/**
 * Demo: connect Flink to Kafka, consume string records from a topic and
 * deserialize each JSON record into a {@link cn.jly.flink.entity.Metric}.
 *
 * @author 姬岚洋
 * @since 2021/1/13 11:18
 */
public class KafkaSourceDemo {
    /**
     * Entry point: builds a Kafka-sourced Flink stream that parses each JSON
     * record into a {@code Metric} and prints it, then submits the job.
     *
     * @param args unused command-line arguments
     * @throws Exception if environment setup or job execution fails
     */
    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = FlinkUtils.getStreamExecutionEnv();

        // Kafka consumer configuration. NOTE: key/value deserializer properties
        // are intentionally NOT set here — FlinkKafkaConsumer ignores them and
        // always deserializes via the DeserializationSchema passed below
        // (SimpleStringSchema), so setting them is dead config.
        final Properties properties = new Properties();
        properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, ConstantUtils.Kafka.BOOTSTRAP_SERVERS_VALUE);
        properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "test");
        properties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");

        /*
            Kafka topic: the topic Flink will consume. To consume several topics
            at once, pass a List of topic names instead; regex topic matching is
            also supported by another constructor overload.
            Deserialization: SimpleStringSchema turns each record into a String.
            Properties: the Kafka client configuration built above.
         */
        final FlinkKafkaConsumer<String> kafkaConsumer = new FlinkKafkaConsumer<>(
                // topic
                "metric_test",
                // record -> String deserialization
                new SimpleStringSchema(),
                // Kafka client configuration
                properties
        );
        // Start from the latest records regardless of committed group offsets.
        // (This overrides auto.offset.reset above, which then only matters for
        // partitions discovered later.)
        kafkaConsumer.setStartFromLatest();

        env.addSource(kafkaConsumer)
                .flatMap(new FlatMapFunction<String, Metric>() {
                    @Override
                    public void flatMap(String value, Collector<Metric> out) {
                        // Skip blank records; guard the JSON parse so a single
                        // malformed record cannot fail the whole streaming job.
                        if (StringUtils.isNotBlank(value)) {
                            try {
                                out.collect(JSON.parseObject(value, Metric.class));
                            } catch (Exception e) {
                                // Best-effort demo: report and drop the bad record.
                                System.err.println("Skipping malformed record: " + value + " (" + e + ")");
                            }
                        }
                    }
                })
                .print();

        FlinkUtils.executeStream(env, "KafkaSourceDemo");
    }
}
