package com.zheng.flink.study.dataset.transformation;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.functions.MapPartitionFunction;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ThreadLocalRandom;

/**
 * MapPartition: 在一个函数中(MapPartitionFunction)转换一个并行的分区
 * <p>
 * 在函数中得到一个可迭代的流(Iterable stream)并且可以生成任意数量的结果值
 * <p>
 * 元素在每个分区中的数量取决于并行度的设置和前序操作
 * <p>
 * <p>
 * Transforms a single parallel partition in a single function call.
 * The function gets the partition as an iterable stream and can produce
 * an arbitrary number of result values. The number of elements in each partition
 * depends on the degree of parallelism and on previous operations.
 *
 * @author zhengbo
 * @date 2019/12/12
 */
@Slf4j
public class MapPartitionTransformation {

    /**
     * 模拟数据库链接
     */
    /**
     * Simulates a database connection pool so the example can demonstrate how
     * often {@code map} vs {@code mapPartition} acquires a connection.
     * Static-only utility: not meant to be instantiated.
     */
    public static class DBUtils {

        private DBUtils() {
            // utility class — no instances
        }

        /**
         * Simulates borrowing a database connection.
         *
         * @return a pseudo connection id in the range [0, 10)
         */
        public static int getDBConnection() {
            return ThreadLocalRandom.current().nextInt(10);
        }

        /**
         * Simulates returning a connection to the pool. Intentionally a no-op:
         * there is nothing to release in this simulation.
         *
         * @param connection the connection id previously obtained from {@link #getDBConnection()}
         */
        public static void returnDBConnection(int connection) {
            // no-op by design
        }
    }

    /**
     * Simple data holder used as the example record type: a person's name and age.
     * Lombok's {@code @Data} generates getters, setters, {@code equals}/{@code hashCode}
     * and {@code toString}; {@code @AllArgsConstructor}/{@code @NoArgsConstructor}
     * generate the constructors (the no-args constructor is required by Flink's
     * POJO serialization rules — NOTE(review): confirm this type is treated as a
     * Flink POJO, not generic-serialized).
     */
    @Data
    @AllArgsConstructor
    @NoArgsConstructor
    public static class PersonEntity {

        // Person's name; used as the grouping key in main().
        private String name;

        // Person's age; summed per name in main().
        private Integer age;

    }

    /**
     * Runs the mapPartition demo: converts each {@link PersonEntity} to a
     * {@code (name, age)} tuple once per partition, then groups by name and sums
     * the ages, printing the result.
     *
     * @param args unused
     * @throws Exception if Flink job execution fails
     */
    public static void main(String[] args) throws Exception {

        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        // Sample input: three records for "zhangsan", two for "lisi", one for "wangwu".
        List<PersonEntity> personEntityList = new ArrayList<>();
        personEntityList.add(new PersonEntity("zhangsan", 11));
        personEntityList.add(new PersonEntity("zhangsan", 11));
        personEntityList.add(new PersonEntity("zhangsan", 11));
        personEntityList.add(new PersonEntity("lisi", 12));
        personEntityList.add(new PersonEntity("lisi", 13));
        personEntityList.add(new PersonEntity("wangwu", 14));

        // Why mapPartition instead of map: with map(), DBUtils.getDBConnection()
        // would run once PER ELEMENT (e.g. 100 times for 100 records); with
        // mapPartition() it runs once PER PARALLEL PARTITION, which is the right
        // place for expensive per-partition setup such as database connections.
        //
        // NOTE(review): the parallelism is set on the mapPartition operator, not
        // on the source — a fromCollection() source is a non-parallel collection
        // input, so setting parallelism there does not distribute the data; the
        // operator's parallelism determines how many partitions mapPartition sees.
        env.fromCollection(personEntityList)
                .mapPartition(new MapPartitionFunction<PersonEntity, Tuple2<String, Integer>>() {
                    @Override
                    public void mapPartition(Iterable<PersonEntity> values, Collector<Tuple2<String, Integer>> out) throws Exception {

                        // NOTE(review): do NOT fully traverse `values` before the
                        // loop below (e.g. JSON-serializing it for logging) — the
                        // partition input may be a single-pass iterator, so a prior
                        // full traversal leaves nothing for the loop to collect.
                        // That is the likely cause of the old "can't collect after
                        // logging" issue; buffer elements into a List first if a
                        // whole-partition log line is needed.
                        for (PersonEntity value : values) {
                            out.collect(new Tuple2<>(value.getName(), value.getAge()));
                        }
                    }
                })
                .setParallelism(3)
                // Group by name (tuple field 0) and sum the ages (tuple field 1).
                .groupBy(0).sum(1).print();

    }
}
