package cn.young.dev.stock;

import org.apache.flink.api.java.tuple.Tuple1;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer010;
import org.apache.flink.streaming.util.serialization.SimpleStringSchema;
import org.apache.flink.util.Collector;
import org.example.MessageWaterEmitter;

import java.util.Properties;

class StockMsgStreamHandler {

    /** Kafka consumer configuration (bootstrap servers, consumer group). */
    private final Properties props = new Properties();
    /** Flink streaming environment the job graph is built on. */
    private final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    public static void main(String[] args) throws Exception {
        String topic = "test123";
        String path = "D:\\temp\\";
        // FIX: the handler invocation was commented out and no instance was ever
        // created, so the program previously did nothing.
        new StockMsgStreamHandler().handler(path, topic);
    }

    public StockMsgStreamHandler() {
        // Critical: checkpointing must be enabled so the Kafka source can snapshot
        // offsets and the job can recover after a failure.
        env.enableCheckpointing(5000);
        // Use event time; timestamps/watermarks come from MessageWaterEmitter below.
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
        props.setProperty("bootstrap.servers", "192.168.16.1:9092,192.168.16.1:9093,192.168.16.1:9094");
        props.setProperty("group.id", "flink-group");
    }

    /**
     * Builds and runs the streaming job: consumes string messages from the given
     * Kafka topic, wraps each one in a {@code Tuple1} and writes the result as
     * text to {@code path}.
     *
     * @param path  output location passed to {@code writeAsText}
     * @param topic Kafka topic to consume from
     * @throws Exception if the Flink job fails to submit or execute
     */
    private void handler(String path, String topic) throws Exception {
        FlinkKafkaConsumer010<String> consumer =
                new FlinkKafkaConsumer010<>(topic, new SimpleStringSchema(), props);
        consumer.assignTimestampsAndWatermarks(new MessageWaterEmitter());

        DataStream<Tuple1<String>> stream = env
                .addSource(consumer)
                .process(new ProcessFunction<String, Tuple1<String>>() {
                    @Override
                    public void processElement(String value, Context ctx, Collector<Tuple1<String>> out) {
                        // FIX: the original only printed the value and never emitted
                        // it, so writeAsText downstream produced no output.
                        out.collect(Tuple1.of(value));
                    }
                });
        stream.writeAsText(path);
        env.execute("Flink-Kafka demo");
    }
}
