package com.neusoft.ds;

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.connector.source.util.ratelimit.RateLimiterStrategy;
import org.apache.flink.connector.datagen.source.DataGeneratorSource;
import org.apache.flink.connector.datagen.source.GeneratorFunction;
import org.apache.flink.connector.file.src.FileSource;
import org.apache.flink.connector.file.src.reader.TextLineInputFormat;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.core.fs.Path;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.kafka.common.protocol.types.Field;

import java.util.Arrays;
import java.util.List;
import java.util.Random;

public class DataStreamApiMain {

    /**
     * Data-generator source: emits one CSV record per second, shaped
     * {@code "<id 1..3>,<epochMillis>,<score 90..99>"}, and prints each record.
     *
     * @param args unused command-line arguments
     * @throws Exception if job submission or execution fails
     */
    public static void main(String[] args) throws Exception {
        // java.util.Random is Serializable, so capturing it in the generator lambda is safe
        // for Flink's operator serialization.
        Random random = new Random();
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Explicitly typed generator; the original used a raw `new DataGeneratorSource(...)`,
        // which compiled only via an unchecked conversion. The diamond form below is type-safe.
        // NOTE: nextInt(origin, bound) requires Java 17+ (RandomGenerator).
        GeneratorFunction<Long, String> generator = seq ->
                (random.nextInt(3) + 1) + "," + System.currentTimeMillis() + "," + random.nextInt(90, 100);

        DataGeneratorSource<String> dataGeneratorSource = new DataGeneratorSource<>(
                generator,
                Long.MAX_VALUE,                   // effectively unbounded record count
                RateLimiterStrategy.perSecond(1), // one record per second
                Types.STRING);

        env.fromSource(dataGeneratorSource, WatermarkStrategy.noWatermarks(), "dataGeneratorSource")
                .print();

        env.execute();

    }

    /**
     * Kafka source: consumes the {@code flink} topic from the earliest offset and
     * prints every record with the prefix {@code kafka}.
     *
     * @param args unused command-line arguments
     * @throws Exception if job submission or execution fails
     */
    public static void main4(String[] args) throws Exception {
        StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();

        // Value-only deserialization: record values are read as plain strings; keys are ignored.
        KafkaSource<String> source = KafkaSource.<String>builder()
                .setBootstrapServers("localhost:9092")
                .setTopics("flink")
                .setGroupId("flink-client")
                .setStartingOffsets(OffsetsInitializer.earliest())
                .setValueOnlyDeserializer(new SimpleStringSchema())
                .build();

        environment
                .fromSource(source, WatermarkStrategy.noWatermarks(), "kafkaSource")
                .print("kafka");

        environment.execute();

    }


    /**
     * Socket source: reads newline-delimited text from {@code localhost:9999}
     * and prints each line to stdout.
     *
     * @param args unused command-line arguments
     * @throws Exception if job submission or execution fails
     */
    public static void main3(String[] args) throws Exception {
        StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();

        // No intermediate variable needed — wire the socket stream straight to the print sink.
        environment.socketTextStream("localhost", 9999).print();

        environment.execute();

    }


    /**
     * File source: streams the lines of a local text file and prints each line.
     *
     * @param args unused command-line arguments
     * @throws Exception if job submission or execution fails
     */
    public static void main2(String[] args) throws Exception {
        StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();

        // Hard-coded Windows path — the job only runs where this file exists.
        Path input = new Path("D:\\bigdata\\m5-flink\\input\\words.txt");
        FileSource<String> source =
                FileSource.forRecordStreamFormat(new TextLineInputFormat(), input).build();

        environment.fromSource(source, WatermarkStrategy.noWatermarks(), "fileSource").print();

        environment.execute();

    }


    /**
     * Collection source: turns a fixed in-memory list into a bounded stream and prints it.
     *
     * @param args unused command-line arguments
     * @throws Exception if job submission or execution fails
     */
    public static void main1(String[] args) throws Exception {
        StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();

        DataStreamSource<Integer> numbers = environment.fromCollection(Arrays.asList(1, 2, 3));
        numbers.print();

        environment.execute();
        // Runs only after the (bounded) job finishes.
        System.out.println("hello world");
    }
}
