package com.hsj;

import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.VoidFunction;
import scala.Tuple2;

import javax.ws.rs.client.Entity;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

/**
 * Description: examples of basic key/value-pair RDD operations.
 * Author: hansh
 * Created: 2018/10/31 15:52
 * Version: 1.0
 * Modified by:
 * Modified on:
 */
public class BasicPairFlatMapValues {
    /**
     * Mutable accumulator used with {@code combineByKey} to track a running
     * sum ({@code count_}) and element count ({@code num_}) per key, so the
     * per-key average can be derived at the end.
     */
    public static class AvgCount implements Serializable {
        public int count_; // running sum of the values folded in so far
        public int num_;   // number of values folded in so far

        public AvgCount(int count, int num) {
            count_ = count;
            num_ = num;
        }

        /** Returns the average of the accumulated values (sum / count). */
        public float avg() {
            return count_ / (float) num_;
        }
    }

    /**
     * Demonstrates common pair-RDD transformations on a small in-memory
     * data set: reduceByKey, groupByKey, combineByKey (average per key),
     * mapValues and flatMapValues.
     *
     * @param args optional; args[0] is the Spark master URL (defaults to "local")
     */
    public static void main(String[] args) {
        // Use the master passed on the command line, defaulting to local mode.
        String master = args.length > 0 ? args[0] : "local";

        JavaSparkContext sc = new JavaSparkContext(
                master, "basicavg", System.getenv("SPARK_HOME"), System.getenv("JARS"));
        try {
            // Typed tuples; the original used raw Tuple2, triggering unchecked warnings.
            JavaPairRDD<Integer, Integer> rdd = sc.parallelizePairs(Arrays.asList(
                    new Tuple2<Integer, Integer>(1, 2),
                    new Tuple2<Integer, Integer>(3, 4),
                    new Tuple2<Integer, Integer>(3, 6)));

            // Sum the values for each key.
            JavaPairRDD<Integer, Integer> rbk = rdd.reduceByKey(new Function2<Integer, Integer, Integer>() {
                @Override
                public Integer call(Integer left, Integer right) throws Exception {
                    return left + right;
                }
            });
            System.out.println("reduceByKey test result is:" + rbk.collect());

            // Group all values under their key.
            JavaPairRDD<Integer, Iterable<Integer>> gbk = rdd.groupByKey();
            System.out.println("groupByKey test result is:" + gbk.collect());

            // Compute the average value per key with combineByKey:
            // createCombiner starts an accumulator from the first value,
            // mergeValue folds further values in, mergeCombiners joins
            // partial accumulators from different partitions.
            JavaPairRDD<Integer, AvgCount> cbk = rdd.combineByKey(new Function<Integer, AvgCount>() {
                @Override
                public AvgCount call(Integer value) {
                    return new AvgCount(value, 1);
                }
            }, new Function2<AvgCount, Integer, AvgCount>() {
                @Override
                public AvgCount call(AvgCount acc, Integer value) throws Exception {
                    acc.count_ += value;
                    acc.num_ += 1;
                    return acc;
                }
            }, new Function2<AvgCount, AvgCount, AvgCount>() {
                @Override
                public AvgCount call(AvgCount acc, AvgCount other) throws Exception {
                    acc.count_ += other.count_;
                    acc.num_ += other.num_;
                    return acc;
                }
            });
            cbk.foreach(new VoidFunction<Tuple2<Integer, AvgCount>>() {
                @Override
                public void call(Tuple2<Integer, AvgCount> entry) throws Exception {
                    System.out.println(entry._1 + " avg is:" + entry._2.avg());
                }
            });

            // Transform each value, keeping its key.
            JavaPairRDD<Integer, Integer> rmk = rdd.mapValues(new Function<Integer, Integer>() {
                @Override
                public Integer call(Integer value) throws Exception {
                    return value + 1;
                }
            });
            System.out.println("mapValues test result is:" + rmk.collect());

            // Apply a function returning an iterable to each value; every element
            // produced becomes its own record under the original key. Commonly
            // used for tokenization.
            JavaPairRDD<Integer, Integer> fmv = rdd.flatMapValues(new Function<Integer, Iterable<Integer>>() {
                @Override
                public Iterable<Integer> call(Integer value) throws Exception {
                    List<Integer> expanded = new ArrayList<Integer>();
                    for (int i = value; i < 6; i++) {
                        expanded.add(i);
                    }
                    return expanded;
                }
            });
            System.out.println("flatMapValues test result is:" + fmv.collect());
        } finally {
            // The original leaked the context; always release Spark resources.
            sc.stop();
        }
    }
}
