package com.atguigu.flink.demo03;

import com.atguigu.flink.demo04.UserBean;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.operators.FilterOperator;
import org.apache.flink.api.java.operators.FlatMapOperator;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.co.CoMapFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.MathUtils;
import org.junit.Test;

import java.util.Arrays;

/**
 * @author admin
 * @date 2021/8/9
 */
public class Transform {


    @Test
    public void map() throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Listen on port 9999 of hadoop102 for text lines.
        DataStreamSource<String> source = env.socketTextStream("hadoop102", 9999);

        // Map each line to (line, length). The explicit returns(...) call is
        // needed because the lambda erases the Tuple2 generic parameters.
        SingleOutputStreamOperator<Tuple2<String, Integer>> mapped = source
                .map((MapFunction<String, Tuple2<String, Integer>>) line -> Tuple2.of(line, line.length()))
                .returns(Types.TUPLE(Types.STRING, Types.INT));

        mapped.print("map>>>");

        env.execute();
    }

    @Test
    public void flatMap() throws Exception {
        // Batch (DataSet) API demo: tokenize a file and drop the word "log".
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        DataSource<String> lines = env.readTextFile("D:\\project\\idea\\flink\\input\\wordcount.txt");

        // Split every line on spaces and emit each token individually.
        FlatMapOperator<String, String> words = lines
                .flatMap((FlatMapFunction<String, String>) (line, out) -> {
                    for (String token : line.split(" ")) {
                        out.collect(token);
                    }
                })
                .returns(Types.STRING);

        // Keep everything except the literal word "log".
        FilterOperator<String> filtered = words.filter(word -> !"log".equals(word));

        filtered.print();
    }

    @Test
    public void group() throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Socket word-count: listen on port 9999 for lines of text.
        DataStreamSource<String> source = env.socketTextStream("hadoop102", 9999);

        // Tokenize each line into (word, 1) pairs; returns(...) is required
        // because the lambda erases the Tuple2 generic parameters.
        SingleOutputStreamOperator<Tuple2<String, Integer>> pairs = source
                .flatMap((FlatMapFunction<String, Tuple2<String, Integer>>) (line, out) -> {
                    for (String word : line.split(" ")) {
                        out.collect(Tuple2.of(word, 1));
                    }
                })
                .returns(Types.TUPLE(Types.STRING, Types.INT));

        // Partition the stream by the word itself (field f0).
        KeyedStream<Tuple2<String, Integer>, Object> keyed =
                pairs.keyBy((KeySelector<Tuple2<String, Integer>, Object>) pair -> pair.f0);

        // Running count per word (field index 1 holds the counter).
        SingleOutputStreamOperator<Tuple2<String, Integer>> counts = keyed.sum(1);

        counts.print();

        env.execute();
    }

    @Test
    public void shuffle() throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Listen on port 9999; print the stream twice to compare partitioning.
        DataStreamSource<String> source = env.socketTextStream("hadoop102", 9999);

        // Default distribution for comparison.
        source.print("print>>>");

        // shuffle(): randomly redistributes elements across downstream subtasks.
        source.shuffle().print("shuffle>>>");

        env.execute();
    }

    @Test
    public void connect() throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // One stream of strings and one stream of numbers.
        DataStreamSource<String> strSource = env.fromElements("a", "b", "c", "d");
        DataStreamSource<Integer> numSource = env.fromElements(1, 2, 3, 4, 5, 6, 7, 8, 9);

        // connect() joins the two streams while letting each keep its own type.
        ConnectedStreams<String, Integer> connected = strSource.connect(numSource);

        // A CoMapFunction handles each input separately; here both sides are
        // normalized to String so the result is a single homogeneous stream.
        SingleOutputStreamOperator<String> result = connected.map(new CoMapFunction<String, Integer, String>() {
            @Override
            public String map1(String value) {
                // String side passes through unchanged.
                return value;
            }

            @Override
            public String map2(Integer value) {
                // Number side is rendered as its decimal string.
                return value.toString();
            }
        });

        result.print();

        env.execute();
    }


    @Test
    public void union() throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Four independent number streams of the same element type.
        DataStreamSource<Integer> numSource1 = env.fromElements(1,2,3,4,5,6,7,8,9);
        DataStreamSource<Integer> numSource2 = env.fromElements(111,22,33,4,4,5,22);
        DataStreamSource<Integer> numSource3 = env.fromElements(2,313,43,14,1);
        DataStreamSource<Integer> numSource4 = env.fromElements(4,14,314,31,4);

        // Merge all four streams into one. union(...) is variadic, so the
        // remaining streams can be passed in a single call.
        // FIX: the original chained numSource1.union(numSource1), unioning the
        // first stream with itself and emitting its elements twice.
        DataStream<Integer> union = numSource1.union(numSource2, numSource3, numSource4);

        union.print();

        env.execute();
    }


    @Test
    public void sum1() throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        DataStreamSource<Integer> source = env.fromElements(1, 2, 3, 34, 431, 3, 234, 311);

        // Tag every number with the constant key "a" so all elements share one group.
        SingleOutputStreamOperator<Tuple2<String, Integer>> tagged = source
                .map((MapFunction<Integer, Tuple2<String, Integer>>) n -> Tuple2.of("a", n))
                .returns(Types.TUPLE(Types.STRING, Types.INT));

        // Key by the tag (f0), then keep a running sum of field index 1.
        KeyedStream<Tuple2<String, Integer>, String> keyed =
                tagged.keyBy((KeySelector<Tuple2<String, Integer>, String>) t -> t.f0);

        SingleOutputStreamOperator<Tuple2<String, Integer>> sum = keyed.sum(1);

        sum.print();

        env.execute();
    }


    @Test
    public void sum2() throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        DataStreamSource<Integer> source = env.fromElements(1, 2, 3, 4, 5, 6, 7, 8,9);

        // Emit a running total via a ProcessFunction.
        SingleOutputStreamOperator<Integer> runningSum = source.process(new ProcessFunction<Integer, Integer>() {
            // NOTE(review): plain field, not Flink managed state — only safe
            // because the operator's parallelism is pinned to 1 below.
            int total = 0;

            @Override
            public void processElement(Integer value, Context ctx, Collector<Integer> out) throws Exception {
                total += value;
                out.collect(total);
            }
        }).setParallelism(1);

        // Print with parallelism 1 so output order matches input order.
        runningSum.print().setParallelism(1);

        env.execute();
    }


    @Test
    public void sum3() throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStreamSource<String> source = env.socketTextStream("hadoop102", 9999);

        // Parse each comma-separated line into a UserBean POJO; POJO type info
        // must be supplied explicitly because the lambda erases it.
        SingleOutputStreamOperator<UserBean> users = source
                .map((MapFunction<String, UserBean>) line -> new UserBean(line.split(",")))
                .returns(Types.POJO(UserBean.class));

        // Group records by user id.
        KeyedStream<UserBean, Integer> keyed = users.keyBy(user -> user.getId());

        // Per id, keep the whole record with the smallest "age" field.
        SingleOutputStreamOperator<UserBean> youngest = keyed.minBy("age");

        youngest.print();

        env.execute();
    }


    @Test
    public void reduce() throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        DataStreamSource<Integer> source = env.fromElements(1, 2, 3, 4, 5, 6, 7, 8,9);

        // Tag everything with the constant key "a" so all elements share one group.
        SingleOutputStreamOperator<Tuple2<String, Integer>> tagged = source
                .map((MapFunction<Integer, Tuple2<String, Integer>>) n -> Tuple2.of("a", n))
                .returns(Types.TUPLE(Types.STRING,Types.INT));

        // Fold pairs together by summing f1; each call emits the running total.
        SingleOutputStreamOperator<Tuple2<String, Integer>> reduced = tagged
                .keyBy(t -> t.f0)
                .reduce((ReduceFunction<Tuple2<String, Integer>>) (left, right) -> Tuple2.of("a", left.f1 + right.f1));

        reduced.print().setParallelism(1);

        env.execute();
    }


    @Test
    public void a(){
        // Reproduces Flink's key-group assignment by hand: compute which
        // parallel subtask the key "b" is routed to.
        String key = "b";
        int maxParallelism = 1 << 7; // 128 key groups
        int parallelism = 16;

        // Key group id = murmur(keyHash) mod maxParallelism.
        int keyGroupId = MathUtils.murmurHash(key.hashCode()) % maxParallelism;

        // Subtask index that owns this key group.
        int subtaskIndex = keyGroupId * parallelism / maxParallelism;

        System.out.println(subtaskIndex);
    }


    @Test
    public void rebalance() throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Job-wide default parallelism.
        env.setParallelism(3);

        DataStreamSource<String> source = env.socketTextStream("hadoop102", 9999);

        // Default distribution for comparison.
        source.print("print>>>");

        // rebalance(): round-robin across ALL downstream subtasks.
        source.rebalance().print("rebalance>>>");

        System.out.println("-----------------------");

        env.execute();
    }

    @Test
    public void rescale() throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Job-wide default parallelism.
        env.setParallelism(3);

        DataStreamSource<String> source = env.socketTextStream("hadoop102", 9999);

        // Default distribution for comparison.
        source.print("print>>>");

        // rescale(): round-robin, but only within each local group of
        // downstream subtasks (unlike rebalance(), which spans all of them).
        // FIX: the print label previously said "rebalance>>>" (copy-paste from
        // the rebalance test), mislabeling this demo's output.
        source.rescale().print("rescale>>>");

        System.out.println("-----------------------");

        env.execute();
    }

}

