package com.digiwin.muke;

import org.apache.flink.api.common.functions.*;
import org.apache.flink.api.common.operators.Order;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.util.Collector;

import java.util.ArrayList;
import java.util.List;

/**
 * @Author yanggld
 * @Date 2021/04/25-16:26
 */
/**
 * Demonstrates the common Flink DataSet transformation operators:
 * map, richMap, filter, mapPartition, first, flatMap, distinct,
 * join, left/right outer join, and cross.
 *
 * Each example is a static method taking the shared {@link ExecutionEnvironment};
 * uncomment the one you want to run in {@link #main(String[])}.
 *
 * @Author yanggld
 * @Date 2021/04/25-16:26
 */
public class DataSetTransformationApp {

    public static void main(String[] args) throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        // Uncomment exactly the example you want to run.
//        mapFunction(env);
        richMapFunction(env);
//        filterFunction(env);
//        mapPartitionFunction(env);
//        firstFunction(env);
//        flatMapFunction(env);
//        distinctFunction(env);
//        joinFunction(env);
//        outerJoinFunction(env);
//        crossFunction(env);
    }

    /**
     * map operator: one input element produces exactly one output element.
     * Squares each integer in 0..9.
     *
     * @param env shared execution environment
     * @throws Exception if job execution fails
     */
    public static void mapFunction(ExecutionEnvironment env) throws Exception {
        List<Integer> list = new ArrayList<>();
        for (int i = 0; i < 10; i++) {
            list.add(i);
        }
        DataSource<Integer> data = env.fromCollection(list);
        data.map(new MapFunction<Integer, Integer>() {
            @Override
            public Integer map(Integer input) throws Exception {
                return input * input;
            }
        }).print();
    }

    /**
     * Same squaring map as {@link #mapFunction(ExecutionEnvironment)}, but using
     * a {@link RichMapFunction} to show the open/close lifecycle hooks —
     * open() runs once per task before the first element (good for acquiring
     * resources), close() runs once after the last (for releasing them).
     *
     * @param env shared execution environment
     * @throws Exception if job execution fails
     */
    public static void richMapFunction(ExecutionEnvironment env) throws Exception {
        List<Integer> list = new ArrayList<>();
        for (int i = 0; i < 10; i++) {
            list.add(i);
        }
        DataSource<Integer> data = env.fromCollection(list);
        data.map(new RichMapFunction<Integer, Integer>() {
            @Override
            public void open(Configuration parameters) throws Exception {
                super.open(parameters);
                System.out.println("-------map之前。启动一次，开启资源-------");
            }

            @Override
            public void close() throws Exception {
                super.close();
                System.out.println("-------关闭资源-------");
            }

            @Override
            public Integer map(Integer input) throws Exception {
                return input * input;
            }
        }).print();
    }

    /**
     * filter operator: keeps only the elements matching the predicate.
     * Squares 0..9, then keeps squares greater than 40 (49, 64, 81).
     *
     * @param env shared execution environment
     * @throws Exception if job execution fails
     */
    public static void filterFunction(ExecutionEnvironment env) throws Exception {
        List<Integer> list = new ArrayList<>();
        for (int i = 0; i < 10; i++) {
            list.add(i);
        }
        DataSource<Integer> data = env.fromCollection(list);
        data.map(new MapFunction<Integer, Integer>() {
            @Override
            public Integer map(Integer input) throws Exception {
                return input * input;
            }
        }).filter(new FilterFunction<Integer>() {
            @Override
            public boolean filter(Integer input) throws Exception {
                return input > 40;
            }
        }).print();
    }

    /**
     * mapPartition operator — well suited for expensive per-task setup such as
     * opening a database connection: the function is invoked once per task
     * (per parallel instance) and receives the whole partition at once,
     * instead of once per element like map.
     *
     * The function also emits one summary element per partition so that the
     * downstream print() has output to show (the original version collected
     * nothing, so print() printed nothing).
     *
     * @param env shared execution environment
     * @throws Exception if job execution fails
     */
    public static void mapPartitionFunction(ExecutionEnvironment env) throws Exception {
        List<Integer> list = new ArrayList<>();
        for (int i = 0; i < 10; i++) {
            list.add(i);
        }
        DataSource<Integer> data = env.fromCollection(list);
        data.mapPartition(new MapPartitionFunction<Integer, String>() {
            @Override
            public void mapPartition(Iterable<Integer> inputs, Collector<String> collector) throws Exception {
                int count = 0;
                for (Integer p : inputs) {
                    System.out.println("输入的数据="+p);
                    count++;
                }
                System.out.println("mapPartition的逻辑============================");
                // Emit one element per partition so print() shows the result.
                collector.collect("partition size=" + count);
            }
        }).setParallelism(2).print();
    }

    /**
     * first operator: returns the first n elements of a DataSet or of each group.
     * Expected output:
     * (1,haddop)
     * (1,spark)
     * (1,flink)
     * -------------------------------------
     * (1,haddop)
     * (1,spark)
     * (2,java)
     * (2,springboot)
     * ++++++++++++++++++++++++++++++++++++++++
     * (1,spark)
     * (1,haddop)
     * (2,vue)
     * (2,springboot)
     * @param env shared execution environment
     * @throws Exception if job execution fails
     */
    public static void firstFunction(ExecutionEnvironment env) throws Exception {
        List<Tuple2<Integer,String>> list = new ArrayList<>();
        list.add(new Tuple2<>(1,"haddop"));
        list.add(new Tuple2<>(1,"spark"));
        list.add(new Tuple2<>(1,"flink"));
        list.add(new Tuple2<>(2,"java"));
        list.add(new Tuple2<>(2,"springboot"));
        list.add(new Tuple2<>(2,"linux"));
        list.add(new Tuple2<>(2,"vue"));
        DataSource<Tuple2<Integer,String>> data = env.fromCollection(list);
        // First 3 elements of the whole DataSet.
        data.first(3).print();
        System.out.println("-------------------------------------");
        // First 2 elements of each group (grouped by field 0).
        data.groupBy(0).first(2).print();
        System.out.println("++++++++++++++++++++++++++++++++++++++++");
        // First 2 of each group after sorting the group by field 1 descending.
        data.groupBy(0).sortGroup(1, Order.DESCENDING).first(2).print();
    }

    /**
     * flatMap operator: one input element produces zero or more output elements.
     * Splits comma-separated strings into words, then does a classic word count
     * via (word, 1) pairs grouped by word and summed.
     *
     * @param env shared execution environment
     * @throws Exception if job execution fails
     */
    public static void flatMapFunction(ExecutionEnvironment env) throws Exception {
        List<String> list = new ArrayList<>();
        list.add("hadoop,spark");
        list.add("hadoop,flink");
        list.add("flink,flink");
        DataSource<String> data = env.fromCollection(list);
        data.flatMap(new FlatMapFunction<String, String>() {
            @Override
            public void flatMap(String input, Collector<String> collector) throws Exception {
                String[] split = input.split(",");
                for (String s : split) {
                    collector.collect(s);
                }
            }
        }).map(new MapFunction<String, Tuple2<String, Integer>>() {
            @Override
            public Tuple2<String, Integer> map(String s) throws Exception {
                return new Tuple2<>(s,1);
            }
        }).groupBy(0).sum(1).print();
    }

    /**
     * distinct operator: removes duplicate elements.
     * Splits the comma-separated strings into words, then prints each unique word.
     *
     * @param env shared execution environment
     * @throws Exception if job execution fails
     */
    public static void distinctFunction(ExecutionEnvironment env) throws Exception {
        List<String> list = new ArrayList<>();
        list.add("hadoop,spark");
        list.add("hadoop,flink");
        list.add("flink,flink");
        DataSource<String> data = env.fromCollection(list);
        data.flatMap(new FlatMapFunction<String, String>() {
            @Override
            public void flatMap(String input, Collector<String> collector) throws Exception {
                String[] split = input.split(",");
                for (String s : split) {
                    collector.collect(s);
                }
            }
        }).distinct().print();
    }


    /**
     * join operator: inner join of two DataSets on their key fields.
     * Expected output:
     * (1,张三,张三)
     * (2,李四,李四)
     * (3,王五,王五)
     * @param env shared execution environment
     * @throws Exception if job execution fails
     */
    public static void joinFunction(ExecutionEnvironment env) throws Exception {
        List<Tuple2<Integer,String>> list = new ArrayList<>();
        list.add(new Tuple2<>(1,"张三"));
        list.add(new Tuple2<>(2,"李四"));
        list.add(new Tuple2<>(3,"王五"));

        List<Tuple2<Integer,String>> list2 = new ArrayList<>();
        list2.add(new Tuple2<>(1,"南京"));
        list2.add(new Tuple2<>(2,"北京"));
        list2.add(new Tuple2<>(3,"东京"));

        DataSource<Tuple2<Integer,String>> data = env.fromCollection(list);
        DataSource<Tuple2<Integer,String>> data2 = env.fromCollection(list2);
        data.join(data2).where(0).equalTo(0).with(new JoinFunction<Tuple2<Integer, String>, Tuple2<Integer, String>, Tuple3<Integer,String,String>>() {
            @Override
            public Tuple3<Integer, String, String> join(Tuple2<Integer, String> input1, Tuple2<Integer, String> input2) throws Exception {
                return new Tuple3<>(input1.f0,input1.f1,input2.f1);
            }
        }).print();
    }


    /**
     * Outer-join operators: left and right outer join of two DataSets,
     * with "-" filling the missing side.
     * Left outer join result:
     (1,张三,南京)
     (2,李四,-)
     (3,王五,北京)
     * Right outer join result:
     (1,张三,南京)
     (4,-,东京)
     (3,王五,北京)
     * @param env shared execution environment
     * @throws Exception if job execution fails
     */
    public static void outerJoinFunction(ExecutionEnvironment env) throws Exception {
        List<Tuple2<Integer,String>> list = new ArrayList<>();
        list.add(new Tuple2<>(1,"张三"));
        list.add(new Tuple2<>(2,"李四"));
        list.add(new Tuple2<>(3,"王五"));

        List<Tuple2<Integer,String>> list2 = new ArrayList<>();
        list2.add(new Tuple2<>(1,"南京"));
        list2.add(new Tuple2<>(3,"北京"));
        list2.add(new Tuple2<>(4,"东京"));

        DataSource<Tuple2<Integer,String>> data = env.fromCollection(list);
        DataSource<Tuple2<Integer,String>> data2 = env.fromCollection(list2);

        // Null-safe join function shared by both joins: in a left outer join
        // input2 may be null for unmatched left rows; in a right outer join
        // input1 may be null for unmatched right rows.
        JoinFunction<Tuple2<Integer, String>, Tuple2<Integer, String>, Tuple3<Integer, String, String>> nullSafeJoin =
                new JoinFunction<Tuple2<Integer, String>, Tuple2<Integer, String>, Tuple3<Integer,String,String>>() {
            @Override
            public Tuple3<Integer, String, String> join(Tuple2<Integer, String> input1, Tuple2<Integer, String> input2) throws Exception {
                if (input1 == null) {
                    return new Tuple3<>(input2.f0,"-",input2.f1);
                }
                if (input2 == null) {
                    return new Tuple3<>(input1.f0,input1.f1,"-");
                }
                return new Tuple3<>(input1.f0,input1.f1,input2.f1);
            }
        };

        // Left outer join: keeps all rows from the left DataSet.
        data.leftOuterJoin(data2).where(0).equalTo(0).with(nullSafeJoin).print();
        // Right outer join: keeps all rows from the right DataSet
        // (previously documented above but never executed).
        data.rightOuterJoin(data2).where(0).equalTo(0).with(nullSafeJoin).print();
    }

    /**
     * cross operator: Cartesian product of two DataSets.
     * Expected output:
     (张三,南京)
     (张三,北京)
     (李四,南京)
     (李四,北京)
     * @param env shared execution environment
     * @throws Exception if job execution fails
     */
    public static void crossFunction(ExecutionEnvironment env) throws Exception {
        List<String> list = new ArrayList<>();
        list.add("张三");
        list.add("李四");

        List<String> list2 = new ArrayList<>();
        list2.add("南京");
        list2.add("北京");

        DataSource<String> data = env.fromCollection(list);
        DataSource<String> data2 = env.fromCollection(list2);

        data.cross(data2).print();
    }
}
