import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;

import java.util.Properties;

/**
 * @ClassName KafkaSourceTest
 * @Description TODO
 * @Author wushumin
 * @Date 2021-06-22 17:34
 * @Version 1.0
 **/
/**
 * Smoke-test Flink job: reads JSON strings from the Kafka topic {@code testtopic},
 * parses each record into a {@link WordCount.Person} with fastjson, keeps only
 * persons with age &gt; 10, and prints them to stdout.
 */
public class KafkaSourceTest {
    public static void main(String[] args) throws Exception {
        // Kafka connection settings for the consumer.
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "cdh1.lcbint.cn:9092,cdh2.lcbint.cn:9092,cdh3.lcbint.cn:9092");
        // zookeeper.connect is only required for Kafka 0.8; harmless for newer brokers.
        properties.setProperty("zookeeper.connect", "cdh1.lcbint.cn:2181,cdh2.lcbint.cn:2181,cdh3.lcbint.cn:2181");
        properties.setProperty("group.id", "testflink_dev");

        // SimpleStringSchema deserializes records to String, so the consumer is
        // parameterized as <String>. This removes the raw-type warning and the
        // manual (String) cast the original map() needed.
        FlinkKafkaConsumer011<String> kafkaConsumer011 =
                new FlinkKafkaConsumer011<>("testtopic", new SimpleStringSchema(), properties);

        StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStreamSource<String> kafkaDataStream = environment.addSource(kafkaConsumer011);

        // Parse each JSON record into a Person; assumes records are valid JSON
        // matching WordCount.Person's fields — TODO confirm against the producer.
        DataStream<WordCount.Person> persons =
                kafkaDataStream.map(json -> JSON.parseObject(json, WordCount.Person.class));

        // Keep only persons older than 10 and print them (stdout sink).
        persons.filter(person -> person.getAge() > 10).print();

        // Lazily-built pipeline runs only once execute() is called.
        environment.execute("kafkaSource");
    }
}
