package com.wdd.flinkdemo.kafka2mysql;

import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

import javax.annotation.PostConstruct;

/**
 * Kafka data source: assembles and launches a Flink streaming job that reads
 * records (currently from a simulated in-process source), keys each record,
 * runs them through {@code KafkaDataHandleKeyFunction}, and writes the result
 * to MySQL via {@code MySink}.
 */
@Component
public class KafkaSource {

    private static final Logger log = LoggerFactory.getLogger(KafkaSource.class);

    /* Real Kafka consumer configuration, kept for when the simulated source
     * below is swapped back to the FlinkKafkaConsumer.
    @Value("${spring.kafka.bootstrap-servers:localhost:9092}")
    private String kafkaServer;
    @Value("${spring.kafka.properties.sasl.jaas.config}")
    private String loginConfig;
    @Value("${customer.flink.cal-device-status-topic}")
    private String topic;

    private Properties getProperties() {
        Properties properties = new Properties();
        // Kafka broker IPs or hostnames, comma separated
        properties.setProperty("bootstrap.servers", kafkaServer);
        // Kafka consumer group.id
        properties.setProperty("group.id", "data-nanlysis-flink-devicestatus");
        // Kafka authentication mechanism: PLAIN
        properties.setProperty("sasl.mechanism", "PLAIN");
        // Kafka security protocol: SASL_PLAINTEXT
        properties.setProperty("security.protocol", "SASL_PLAINTEXT");
        // Kafka login credentials (JAAS config string)
        properties.setProperty("sasl.jaas.config", loginConfig);
        return properties;
    }*/

    /**
     * Builds the job graph and starts it on a background thread. Invoked once
     * by Spring after dependency injection.
     *
     * @throws Exception if the job graph cannot be assembled
     */
    @PostConstruct
    public void execute() throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Checkpoint every 5 seconds so operator state survives failures.
        env.enableCheckpointing(5000);
        env.setParallelism(1);
//        FlinkKafkaConsumer<String> myConsumer = new FlinkKafkaConsumer<>(topic, new SimpleStringSchema(), getProperties());
        // Bind the real data source
//        DataStream<String> dataStreamSource = env.addSource(myConsumer).name("datasource-kafka");

        // Simulated Kafka data: emits "test0", "test1", ... every 3 seconds.
        DataStream<String> dataStreamSource = env.addSource(new SourceFunction<String>() {

            // Checked by the emit loop; cleared by cancel() so the source
            // actually terminates when the job is cancelled. Without this
            // flag the original while(true) loop ran forever after cancel.
            private volatile boolean running = true;

            @Override
            public void run(SourceContext<String> ctx) throws Exception {
                long counter = 0;
                while (running) {
                    ctx.collect("test" + counter++);
                    Thread.sleep(3000);
                }
            }

            @Override
            public void cancel() {
                running = false;
            }
        }).name("datasource-kafka");
        dataStreamSource.print().setParallelism(1);

        KeyedProcessFunction keyedProcessFunction = new KafkaDataHandleKeyFunction();
        // Key by the record itself; String has stable hashCode/equals, so it
        // is a safe Flink key type.
        SingleOutputStreamOperator process = dataStreamSource.keyBy(new KeySelector<String, String>() {
            @Override
            public String getKey(String s) throws Exception {
                return s;
            }
        }).process(keyedProcessFunction);

        // Write processed results to the database.
        process.addSink(new MySink());

        // env.execute() blocks until the job terminates, so run it on a
        // background thread to avoid stalling Spring startup in @PostConstruct.
        new Thread(() -> {
            try {
                env.execute("deviceStatusFlinkJob");
            } catch (Exception e) {
                // Never swallow job failures — they must show up in the logs.
                log.error("Flink job deviceStatusFlinkJob failed", e);
            }
        }, "deviceStatusFlinkJob-runner").start();
    }
}
