package com.ruoyi.flink.dataStream;

import com.ruoyi.flink.source.AsyncDatabaseRequest;
import com.ruoyi.flink.source.CustomParallelSourceFunction;
import com.ruoyi.flink.source.CustomRichParallelSourceFunction;
import com.ruoyi.flink.source.CustomSourceFunction;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.ConnectedStreams;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.CoFlatMapFunction;
import org.apache.flink.util.Collector;

import java.util.concurrent.TimeUnit;

/**
 * @program: ruoyi
 * @description: Data source practice — demonstrates Flink built-in, custom, async, and connected sources
 * @author: zengchen
 * @create: 2020-11-19 17:26
 **/
public class SourceTest {

    /**
     * Entry point: builds the streaming environment, wires up whichever demo is
     * currently enabled (the rest stay commented out for reference), and runs the job.
     *
     * @param args unused command-line arguments
     * @throws Exception if job submission or execution fails
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
//        env.setParallelism(1);

        // 1. Basic built-in sources
//        DataStreamSource<Integer> listDataStreamSource = env.fromElements(1, 2, 3);
//        DataStreamSource<Integer> integerDataStreamSource = env.fromCollection(Arrays.asList(1, 2, 3));
//        DataStreamSource<String> stringDataStreamSource = env.readTextFile("", "utf-8");


        // 2. DataStream connectors — some usable as sources, others only as sinks
//        kafka, rabbitMQ, hadoop, etc.

        // 3. addSource with a custom SourceFunction implementation
//        customSourceFunction(env); // custom source that cannot run in parallel
//        customParallelSourceFunction(env); // custom source that can run in parallel
//        customRichParallelSourceFunction(env); // parallel source with access to the RuntimeContext

        // 4. Asynchronous I/O against external data
//        asyncOutData(env);

        // 5. Connecting / merging sources
        dataSourceCollect(env);


        env.execute("websocket");
    }

    /**
     * Demonstrates connecting two streams: two identical Long-emitting sources are
     * joined via connect(), and a CoFlatMapFunction tags each element with the
     * stream it arrived on before printing.
     *
     * @param env the streaming execution environment to attach the sources to
     */
    private static void dataSourceCollect(StreamExecutionEnvironment env) {
        DataStreamSource<Long> first = env.addSource(new CustomSourceFunction());
        DataStreamSource<Long> second = env.addSource(new CustomSourceFunction());
        ConnectedStreams<Long, Long> connected = first.connect(second);
        connected.flatMap(new CoFlatMapFunction<Long, Long, String>() {
            @Override
            public void flatMap1(Long value, Collector<String> out) throws Exception {
                // Elements arriving on the first stream
                out.collect("streamSource1:" + value);
            }

            @Override
            public void flatMap2(Long value, Collector<String> out) throws Exception {
                // Elements arriving on the second stream
                out.collect("streamSource2:" + value);
            }
        }).print();
    }

    /**
     * Runs the rich parallel custom source (which can access the RuntimeContext)
     * with an explicit parallelism of 1 and prints its output.
     *
     * @param env the streaming execution environment
     */
    private static void customRichParallelSourceFunction(StreamExecutionEnvironment env) {
        // Parallelism defaults to the number of CPU cores when not set explicitly.
        DataStreamSource<Long> source =
                env.addSource(new CustomRichParallelSourceFunction()).setParallelism(1);
        source.print();
    }

    /**
     * Runs the plain parallel custom source with an explicit parallelism of 2
     * and prints its output.
     *
     * @param env the streaming execution environment
     */
    private static void customParallelSourceFunction(StreamExecutionEnvironment env) {
        // Parallelism defaults to the number of CPU cores when not set explicitly.
        DataStreamSource<Long> source =
                env.addSource(new CustomParallelSourceFunction()).setParallelism(2);
        source.print();
    }


    /**
     * Runs the non-parallel custom source and prints its output.
     * Calling setParallelism(&gt;1) on this source throws an exception,
     * confirming it cannot be executed in parallel.
     *
     * @param env the streaming execution environment
     */
    private static void customSourceFunction(StreamExecutionEnvironment env) {
        DataStreamSource<Long> source = env.addSource(new CustomSourceFunction());
        source.print();
    }

    /**
     * Demonstrates Flink's async I/O pattern: reads lines from a local socket,
     * trims them, then performs unordered asynchronous lookups against an
     * external store (1000 ms timeout, at most 100 in-flight requests) and
     * prints the resulting tuples.
     *
     * @param env the streaming execution environment
     */
    private static void asyncOutData(StreamExecutionEnvironment env) {
        DataStreamSource<String> lines = env.socketTextStream("localhost", 9999);
        SingleOutputStreamOperator<String> trimmed = lines.map(String::trim);
        SingleOutputStreamOperator<Tuple4<String, Integer, String, String>> enriched =
                AsyncDataStream.unorderedWait(trimmed, new AsyncDatabaseRequest(), 1000, TimeUnit.MILLISECONDS, 100);
        enriched.print();
    }

}
