import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.file.src.FileSource;
import org.apache.flink.connector.file.src.reader.TextLineInputFormat;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.core.fs.Path;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.SourceFunction;

import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;

/**
 * Demo of the different ways to create a Flink data source:
 * file, collection, elements, socket, Kafka, and a custom {@code SourceFunction}.
 *
 * <p>Each {@code source_*} method builds one kind of source on the given
 * execution environment; {@link #main(String[])} picks one and executes the job.
 */
public class Demo02_source {

    /**
     * Reads a file-based source.
     * Each line of the file becomes one record of the stream, and records may be
     * distributed across different parallel subtasks. If Flink detects that no
     * more data will arrive (bounded mode), the job terminates automatically.
     *
     * @param env the stream execution environment to attach the source to
     */
    public static void source_file(StreamExecutionEnvironment env){

        // readTextFile works, but it is deprecated in current Flink versions.
//        DataStreamSource<String> lineDataStream = env.readTextFile("D:/data/data_flink/sensor");

        // Recommended approach in newer Flink versions: the unified FileSource.
        // Typed as FileSource<String> (TextLineInputFormat produces String records)
        // to avoid raw-type unchecked warnings.
        FileSource<String> fileSource = FileSource
                .forRecordStreamFormat(
                    new TextLineInputFormat(),
                    new Path("D:/data/data_flink/sensor")
                )
                // Without this call the source is bounded: it terminates after
                // reading the existing data. With it, the directory is re-scanned
                // at the given interval and new files are picked up continuously.
                .monitorContinuously(Duration.ofSeconds(5))
                .build();

        DataStreamSource<String> lineDataStream = env.fromSource(
                fileSource,
                WatermarkStrategy.noWatermarks(),
                "file"
        );

        lineDataStream.print();

    }

    /**
     * Reads a collection-based source (each list element becomes one record).
     *
     * @param env the stream execution environment to attach the source to
     */
    public static void source_collection(StreamExecutionEnvironment env){

        List<String> sensorList = new ArrayList<>();
        sensorList.add("sensor_1,1547718199,30.80");
        sensorList.add("sensor_6,1547718201,15.40");
        sensorList.add("sensor_7,1547718202,6.27");
        sensorList.add("sensor_8,1547718203,28.70");
        sensorList.add("sensor_6,1547718204,12.49");
        sensorList.add("sensor_1,1547718205,15.50");

        DataStreamSource<String> dataStream = env.fromCollection(sensorList);

        dataStream.print();
    }

    /**
     * Reads an element-based source (varargs elements become the records).
     *
     * @param env the stream execution environment to attach the source to
     */
    public static void source_items(StreamExecutionEnvironment env){

        DataStreamSource<String> dataStream = env.fromElements(
                "sensor_1,1547718199,30.80",
                "sensor_6,1547718201,15.40",
                "sensor_7,1547718202,6.72",
                "sensor_8,1547718203,28.72",
                "sensor_8,1547718204,12.72",
                "sensor_8,1547718205,6.72"
        );
        dataStream.print();

    }

    /**
     * Reads a socket-based source from host "node101" on the given port.
     *
     * @param env  the stream execution environment to attach the source to
     * @param port the TCP port to read text lines from
     * @return the resulting text-line stream
     */
    public static DataStreamSource<String> source_socket(StreamExecutionEnvironment env,int port){
        return env.socketTextStream("node101",port);
    }

    /**
     * Reads a Kafka-based source from topic "supermarket".
     *
     * @param env the stream execution environment to attach the source to
     */
    public static void source_kafka(StreamExecutionEnvironment env){

        // Typed as KafkaSource<String> (value-only deserialization via
        // SimpleStringSchema) to avoid raw-type unchecked warnings.
        KafkaSource<String> kafkaSource =  KafkaSource.<String>builder()
                .setBootstrapServers("node101:9092,node102:9092,node103:9092,node104:9092")
                .setTopics("supermarket")
                .setGroupId("supermarket")
                .setValueOnlyDeserializer(new SimpleStringSchema())
                .build();


        DataStreamSource<String> dataStream = env.fromSource(
                kafkaSource,
                WatermarkStrategy.noWatermarks(),
                "kafka"
        );

        dataStream.print();
    }

    /**
     * Reads a custom source that emits random sensor readings
     * ("sensor-N,timestamp,temperature") every 1-3 seconds until cancelled.
     *
     * @param env the stream execution environment to attach the source to
     */
    public static void source_self(StreamExecutionEnvironment env){

        // addSource is deprecated in current versions, but the Flink source code
        // itself still uses it, so it is kept here for the demo.
        DataStreamSource<String> dataStream= env.addSource(
            new SourceFunction<String>() {

                // volatile is required: cancel() is called from a different
                // thread than run(), and without it the emitting loop may never
                // observe the flag change and the source would not stop.
                volatile boolean flag = true;
                Random rand = new Random();

                @Override
                public void run(SourceContext<String> ctx) throws Exception {

                    while(flag){
                        // Random sensor id in sensor-1 .. sensor-10.
                        String sensorId = "sensor-"+(rand.nextInt(10)+1);
                        long timestamp = System.currentTimeMillis();
                        // Random temperature in [-40, 40].
                        double temperature = rand.nextInt(81)-40;
                        String message = sensorId+","+timestamp+","+temperature;
                        ctx.collect(message);
                        // Sleep 1000-2999 ms between emissions.
                        Thread.sleep(rand.nextInt(2000)+1000);
                    }
                }

                @Override
                public void cancel() {
                    flag = false;
                }
            }
        );

        dataStream.print();

    }

    /**
     * Entry point: builds the environment, attaches one of the demo sources,
     * and executes the job.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
//        source_file(env);
//        source_collection(env);
//        source_items(env);
//        source_kafka(env);
        source_self(env);


        try {
            env.execute("source");
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

}
