package com.neusoft.physicalpartitioning;
/**
 * VM startup options required on JDK 9+ (module system) for Flink's
 * reflective access:
 * --illegal-access=deny
 * --add-opens java.base/java.lang=ALL-UNNAMED
 * --add-opens java.base/java.lang.reflect=ALL-UNNAMED
 * --add-opens java.base/java.lang.invoke=ALL-UNNAMED
 * --add-opens java.base/java.math=ALL-UNNAMED
 * --add-opens java.base/java.util=ALL-UNNAMED
 * --add-opens java.base/java.util.concurrent=ALL-UNNAMED
 * --add-opens java.base/java.net=ALL-UNNAMED
 * --add-opens java.base/java.text=ALL-UNNAMED
 */

import com.neusoft.pojo.WaterSensor;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.connector.source.util.ratelimit.RateLimiterStrategy;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.connector.datagen.source.DataGeneratorSource;
import org.apache.flink.runtime.state.FunctionInitializationContext;
import org.apache.flink.streaming.api.datastream.ConnectedStreams;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SideOutputDataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.co.CoMapFunction;
import org.apache.flink.streaming.api.functions.co.CoProcessFunction;
import org.apache.flink.streaming.api.functions.source.RichParallelSourceFunction;
import org.apache.flink.streaming.api.functions.source.datagen.DataGenerator;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import org.apache.kafka.common.protocol.types.Field;

import java.util.*;

public class PhysicalPartitioningApp {

    /** Shared RNG for the data-generator demos; seeded with the wall clock. */
    private static final Random random = new Random(System.currentTimeMillis());

    /**
     * Demo: stream "inner join" via connect + keyBy + CoProcessFunction.
     * <p>
     * {@code dept} is Tuple2(deptId, deptName); {@code emp} is
     * Tuple3(empNo, empName, deptId). Both sides are keyed on deptId so that,
     * under parallelism &gt; 1, records with the same key reach the same
     * subtask. Each side caches everything it has seen and emits one joined
     * line per match found so far.
     * <p>
     * NOTE(review): the caches are plain operator-scope HashMaps, not Flink
     * managed state — they are lost on failure/restore. Fine for a demo; use
     * keyed state in production.
     *
     * @param args unused
     * @throws Exception if job submission or execution fails
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(2);

        DataStreamSource<Tuple2<Integer, String>> dept = env.fromElements(
                Tuple2.of(20, "RESEARCH"),
                Tuple2.of(30, "SALES"),
                Tuple2.of(10, "ACCOUNTING")

        );
        DataStreamSource<Tuple3<Integer, String, Integer>> emp = env.fromElements(
                Tuple3.of(7369, "SMITH", 20),
                Tuple3.of(7499, "ALLEN", 30),
                Tuple3.of(7521, "WARD", 30),
                Tuple3.of(7566, "JONES", 20)
        );
        ConnectedStreams<Tuple2<Integer, String>, Tuple3<Integer, String, Integer>> connect = dept.connect(emp);
        // With parallelism > 1 the connected streams MUST be keyed on the join
        // condition, otherwise matching records may land on different subtasks
        // and never meet.
        ConnectedStreams<Tuple2<Integer, String>, Tuple3<Integer, String, Integer>> connectKey =
                connect.keyBy(d -> d.f0, e -> e.f2);

        SingleOutputStreamOperator<String> result = connectKey.process(
                new CoProcessFunction<Tuple2<Integer, String>, Tuple3<Integer, String, Integer>, String>() {
                    // Per-subtask caches of everything seen so far: key = deptId,
                    // value = list of records for that key.
                    Map<Integer, List<Tuple2<Integer, String>>> s1Cache = new HashMap<>();
                    Map<Integer, List<Tuple3<Integer, String, Integer>>> s2Cache = new HashMap<>();

                    @Override
                    public void processElement1(Tuple2<Integer, String> value, Context ctx, Collector<String> out) throws Exception {
                        Integer id = value.f0;
                        // 1. Cache every s1 record under its deptId.
                        s1Cache.computeIfAbsent(id, k -> new ArrayList<>()).add(value);
                        // 2. Emit one joined line for each s2 record already seen
                        //    for this id (matched records only).
                        for (Tuple3<Integer, String, Integer> s2Element : s2Cache.getOrDefault(id, Collections.emptyList())) {
                            out.collect("s1:" + value + "<--------->s2:" + s2Element);
                        }
                    }

                    @Override
                    public void processElement2(Tuple3<Integer, String, Integer> value, Context ctx, Collector<String> out) throws Exception {
                        Integer id = value.f2;
                        // 1. Cache every s2 record under its deptId.
                        s2Cache.computeIfAbsent(id, k -> new ArrayList<>()).add(value);
                        // 2. Emit one joined line for each s1 record already seen
                        //    for this id (matched records only).
                        for (Tuple2<Integer, String> s1Element : s1Cache.getOrDefault(id, Collections.emptyList())) {
                            out.collect("s1:" + s1Element + "<--------->s2:" + value);
                        }
                    }
                });
        result.print();
        env.execute();
    }

    /**
     * Demo: connecting two streams of different element types with a
     * CoMapFunction. The mapped result is intentionally discarded (both map
     * methods return null and nothing is printed) — this only shows the API
     * shape. Requires a socket server on localhost:9999 feeding integers.
     *
     * @param args unused
     * @throws Exception if job submission or execution fails
     */
    public static void main7(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        DataStreamSource<String> ds1 = env.fromElements("asdfasdf", "asdas", "fasf", "sdasd", "ffad", "dfasd");

        SingleOutputStreamOperator<Integer> ds2 = env
                .socketTextStream("localhost", 9999)
                .map(Integer::parseInt);

        ConnectedStreams<String, Integer> connect = ds1.connect(ds2);
        connect.map(new CoMapFunction<String, Integer, String>() {
            @Override
            public String map1(String value) throws Exception {
                return null;
            }

            @Override
            public String map2(Integer value) throws Exception {
                return null;
            }
        });

        env.execute();
    }

    /**
     * Demo: union of several streams. Union requires identical element types,
     * so the String stream is mapped to Integer first.
     *
     * @param args unused
     * @throws Exception if job submission or execution fails
     */
    public static void main6(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        env.setParallelism(1);

        DataStreamSource<Integer> ds1 = env.fromElements(1, 2, 3);
        DataStreamSource<Integer> ds2 = env.fromElements(4, 5, 6);
        DataStreamSource<String> ds3 = env.fromElements("4442", "44442", "444443");

        ds1.union(ds2, ds3.map(Integer::valueOf))
                .print();

        env.execute();
    }

    /**
     * Demo: side outputs. Records with id "1" or "2" are routed to tagged side
     * outputs; everything else stays on the main stream. The side-output
     * prints are left commented out on purpose — only the main stream prints.
     *
     * @param args unused
     * @throws Exception if job submission or execution fails
     */
    public static void main5(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // Source: one CSV line per second, "id,timestamp,value" with id in 1..3
        // and value in 90..99.
        DataGeneratorSource<String> dataGeneratorSource = new DataGeneratorSource<String>(
                l -> {
                    String line = ("" + (random.nextInt(3) + 1) + "," + System.currentTimeMillis() + "," + random.nextInt(90, 100));
                    return line;
                },
                Long.MAX_VALUE,
                RateLimiterStrategy.perSecond(1),
                Types.STRING);
        SingleOutputStreamOperator<WaterSensor> source = env.fromSource(dataGeneratorSource, WatermarkStrategy.noWatermarks(), "dataGeneratorSource")
                .map(line -> {
                    // assumes WaterSensor has a (String, String, String) constructor
                    // matching id, ts, value — TODO confirm against the POJO
                    String[] words = line.split(",");
                    return new WaterSensor(words[0], words[1], words[2]);
                });
        OutputTag<WaterSensor> outputTag1 = new OutputTag<>("s1", Types.POJO(WaterSensor.class));
        OutputTag<WaterSensor> outputTag2 = new OutputTag<>("s2", Types.POJO(WaterSensor.class));

        SingleOutputStreamOperator<WaterSensor> process = source.process(new ProcessFunction<WaterSensor, WaterSensor>() {

            @Override
            public void processElement(WaterSensor waterSensor, ProcessFunction<WaterSensor, WaterSensor>.Context ctx, Collector<WaterSensor> out) throws Exception {
                if ("1".equals(waterSensor.getId())) {
                    ctx.output(outputTag1, waterSensor);
                } else if ("2".equals(waterSensor.getId())) {
                    ctx.output(outputTag2, waterSensor);
                } else {
                    out.collect(waterSensor);
                }
            }
        });
        process.print("中间流");
//        SideOutputDataStream<WaterSensor> sideOutput1 = process.getSideOutput(outputTag1);
//        SideOutputDataStream<WaterSensor> sideOutput2 = process.getSideOutput(outputTag2);
//        sideOutput1.print("Left : ");
//        sideOutput2.print("Right: ");
        env.execute();
    }

    /**
     * Demo: the simplest way to "split" a stream — filter it twice. The two
     * resulting streams hold sensors with id "1" and id "2" respectively
     * (records with id "3" are dropped).
     *
     * @param args unused
     * @throws Exception if job submission or execution fails
     */
    public static void main4(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // Source: one CSV line per second, "id,timestamp,value" with id in 1..3
        // and value in 90..99.
        DataGeneratorSource<String> dataGeneratorSource = new DataGeneratorSource<String>(
                l -> {
                    String line = ("" + (random.nextInt(3) + 1) + "," + System.currentTimeMillis() + "," + random.nextInt(90, 100));
                    return line;
                },
                Long.MAX_VALUE,
                RateLimiterStrategy.perSecond(1),
                Types.STRING);
        SingleOutputStreamOperator<WaterSensor> source = env.fromSource(dataGeneratorSource, WatermarkStrategy.noWatermarks(), "dataGeneratorSource")
                .map(line -> {
                    // assumes WaterSensor has a (String, String, String) constructor
                    // matching id, ts, value — TODO confirm against the POJO
                    String[] words = line.split(",");
                    return new WaterSensor(words[0], words[1], words[2]);
                });
        // Split by filtering the same source twice: one stream per sensor id.
        SingleOutputStreamOperator<WaterSensor> ds1 = source.filter(waterSensor -> waterSensor.getId().startsWith("1"));
        SingleOutputStreamOperator<WaterSensor> ds2 = source.filter(waterSensor -> waterSensor.getId().startsWith("2"));

        // Labels fixed: the split is by sensor id, not by odd/even.
        ds1.print("id=1");
        ds2.print("id=2");

        env.execute();
    }

    /**
     * Demo: broadcast partitioning (.broadcast()) — every record is replicated
     * to ALL downstream parallel subtasks.
     * <p>
     * The source runs with parallelism 1, so getIndexOfThisSubtask() is always
     * 0 and only loop values with (i + 1) % 4 == 0 (i = 3 and 7) are emitted;
     * each emitted record then appears once per print subtask, i.e. 4 copies.
     *
     * @param args unused
     * @throws Exception if job submission or execution fails
     */
    public static void main3(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);
        DataStreamSource<String> stream03 = env.addSource(new RichParallelSourceFunction<String>() {
            @Override
            public void run(SourceContext<String> ctx) throws Exception {
                for (int i = 0; i < 8; i++) {
                    // The rich parallel-source variant gives access to the
                    // runtime context, so each subtask can check its own index.
                    if ((i + 1) % 4 == getRuntimeContext().getIndexOfThisSubtask()) {
                        ctx.collect("Index : " + getRuntimeContext().getIndexOfThisSubtask());
                    }
                }
            }

            @Override
            public void cancel() {
            }
        }).setParallelism(1);
        // Broadcast: copy every record to all 4 print subtasks.
        stream03.broadcast().print().setParallelism(4);
        env.execute();
    }

    /**
     * Demo: rescale partitioning (.rescale()) — round-robin within a LOCAL
     * group of downstream subtasks instead of across all of them.
     * <p>
     * The source runs with parallelism 2: subtask 0 emits even loop positions
     * ((i + 1) % 2 == 0), subtask 1 emits odd ones, and the print operator
     * runs with parallelism 4, so each source subtask feeds its own pair of
     * sinks.
     *
     * @param args unused
     * @throws Exception if job submission or execution fails
     */
    public static void main2(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        DataStreamSource<String> stream03 = env.addSource(new RichParallelSourceFunction<String>() {
            @Override
            public void run(SourceContext<String> ctx) throws Exception {
                for (int i = 0; i < 8; i++) {
                    // The rich parallel-source variant gives access to the
                    // runtime context, so each subtask can check its own index.
                    if ((i + 1) % 2 == getRuntimeContext().getIndexOfThisSubtask()) {
                        ctx.collect("Index : " + getRuntimeContext().getIndexOfThisSubtask());
                    }
                }
            }

            @Override
            public void cancel() {
            }
        }).setParallelism(2);
        // Rescale: round-robin inside each local group of sink subtasks.
        stream03.rescale().print().setParallelism(4);
        env.execute();
    }

    /**
     * Demo: shuffle vs rebalance partitioning. shuffle() distributes records
     * randomly (uniform distribution) so repeated runs differ; rebalance()
     * distributes them round-robin across all downstream subtasks. Requires a
     * socket server on localhost:9999.
     *
     * @param args unused
     * @throws Exception if job submission or execution fails
     */
    public static void main1(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStreamSource<String> lineStream = env.socketTextStream("localhost", 9999);
        env.setParallelism(2);
//        lineStream.shuffle().print();
        lineStream.rebalance().print();
        env.execute();
    }
}
