package org.flink.api;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

import java.util.ArrayList;
import java.util.List;

/**
 * Demonstrates basic Flink DataStream API operators — map, flatMap, filter,
 * keyBy, built-in aggregation (max) and reduce — against a custom streaming
 * source of {@code Item} records and an in-memory {@code Tuple3} collection.
 *
 * @author TongRui
 * @since 2020/11/2
 */
public class ApiTest {

    /** Shared streaming environment for the Item-based examples. */
    private static StreamExecutionEnvironment environment;
    /** Source of Item records emitted by the custom MyStreamSource. */
    private static DataStreamSource<Item> streamSource;

    static {
        // Create the streaming environment.
        environment = StreamExecutionEnvironment.getExecutionEnvironment();
        // Register the custom source; parallelism 1 keeps element order deterministic.
        streamSource = environment.addSource(new MyStreamSource()).setParallelism(1);
    }

    /**
     * map: {@code <R> SingleOutputStreamOperator<R> map(MapFunction<T, R>)}.
     * Transforms each input element (T) into exactly one output element (R).
     * Here we project each Item to its name string via a method reference.
     */
    private static void testApiMap() throws Exception {

        // Extract only the name property of each Item.
        SingleOutputStreamOperator<String> osOperator = streamSource.map(Item::getName);
        // Print to stderr so the output stands out from Flink's own logging.
        osOperator.printToErr();

        environment.execute("print source");

    }

    /**
     * Same projection as {@link #testApiMap()}, but with an explicit
     * anonymous {@link MapFunction} instead of a method reference.
     */
    private static void testApiSelfMap() throws Exception {
        // Extract only the name property of each Item.
        SingleOutputStreamOperator<String> osOperator = streamSource.map(new MapFunction<Item, String>() {
            @Override
            public String map(Item item) throws Exception {
                return item.getName();
            }
        });
        // Print results.
        osOperator.printToErr();

        environment.execute("print source");
    }


    /**
     * flatMap: {@code <R> SingleOutputStreamOperator<R> flatMap(FlatMapFunction<T, R>)}.
     * Unlike map, flatMap may emit zero, one, or many elements per input via
     * the {@link Collector}. This example emits exactly one (the item name),
     * so it behaves like map — the point is the Collector-based API shape.
     */
    private static void testApiFlatMap() throws Exception {

        SingleOutputStreamOperator<String> outputStreamOperator = streamSource.flatMap(new FlatMapFunction<Item, String>() {
            @Override
            public void flatMap(Item item, Collector<String> collector) throws Exception {
                collector.collect(item.getName());
            }
        });

        outputStreamOperator.printToErr();

        environment.execute("print source");
    }

    /**
     * filter: {@code SingleOutputStreamOperator<T> filter(FilterFunction<T>)}.
     * Keeps only elements for which the predicate returns true —
     * here, Items with an even id.
     */
    private static void testApiFilter() throws Exception {

        // Note: filter is not a generic method, so no type witness is needed.
        streamSource.filter(item -> item.getId() % 2 == 0).printToErr();

        environment.execute("print source");

    }

    /**
     * keyBy partitions the stream by key, similar to SQL GROUP BY.
     * NOTE(review): not yet implemented — see testApiAgg/testApiReduce,
     * which both demonstrate keyBy as the first step of their pipelines.
     */
    private static void testApiKeyBy() {



    }

    /**
     * Demonstrates a built-in aggregation: keyBy on tuple field 0,
     * then a rolling max over tuple field 2. Each incoming element
     * produces an updated aggregate for its key group.
     *
     * @throws Exception if job execution fails
     */
    private static void testApiAgg() throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Build an in-memory data set: (key, payload, value).
        List<Tuple3<Integer, Integer, Integer>> data = new ArrayList<>();
        data.add(new Tuple3<>(0, 1, 0));
        data.add(new Tuple3<>(0, 1, 1));
        data.add(new Tuple3<>(0, 2, 2));
        data.add(new Tuple3<>(0, 1, 3));
        data.add(new Tuple3<>(1, 2, 5));
        data.add(new Tuple3<>(1, 2, 9));
        data.add(new Tuple3<>(1, 2, 11));
        data.add(new Tuple3<>(1, 2, 13));
        // Parameterized source type (was a raw DataStreamSource).
        DataStreamSource<Tuple3<Integer, Integer, Integer>> items = env.fromCollection(data);
        items.keyBy(0).max(2).printToErr();
        // Trigger execution — without execute() the pipeline never runs.
        String jobName = "user defined streaming source";
        env.execute(jobName);

    }

    /**
     * Demonstrates reduce: keyBy on tuple field 0, then a rolling sum of
     * tuple field 2 per key, emitting the running total after each element.
     *
     * @throws Exception if job execution fails
     */
    private static void testApiReduce() throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        List<Tuple3<Integer, Integer, Integer>> data = new ArrayList<>();
        data.add(new Tuple3<>(0, 1, 0));
        data.add(new Tuple3<>(0, 1, 1));
        data.add(new Tuple3<>(0, 2, 2));
        data.add(new Tuple3<>(0, 1, 3));
        data.add(new Tuple3<>(1, 2, 5));
        data.add(new Tuple3<>(1, 2, 9));
        data.add(new Tuple3<>(1, 2, 11));
        data.add(new Tuple3<>(1, 2, 13));
        DataStreamSource<Tuple3<Integer, Integer, Integer>> items = env.fromCollection(data);
        SingleOutputStreamOperator<Tuple3<Integer, Integer, Integer>> reduce = items.keyBy(0).reduce((ReduceFunction<Tuple3<Integer, Integer, Integer>>) (t1, t2) -> {
            Tuple3<Integer, Integer, Integer> newTuple = new Tuple3<>();
            // Carry the key through (was hard-coded to 0, which collapsed key group 1).
            newTuple.setFields(t1.f0, 0, (Integer) t1.getField(2) + (Integer) t2.getField(2));
            return newTuple;
        });
        reduce.printToErr().setParallelism(1);
        // Trigger execution — previously missing, so the reduce job never ran.
        env.execute("reduce job");
    }


    public static void main(String[] args) throws Exception {

//        testApiMap();
//        testApiSelfMap();
//        testApiFlatMap();
//        testApiFilter();
//        testApiAgg();
        testApiReduce();
    }

}
