package com.softfly.bigdata.spark;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.PairFlatMapFunction;
import org.apache.spark.api.java.function.PairFunction;
import scala.Int;
import scala.Tuple2;

import java.util.Iterator;
import java.util.Map;

public class GroupByDemo {

    /**
     * Demo of RDD repartitioning: reads comma-separated {@code key,intValue}
     * lines from a local text file, maps them to {@code (String, Integer)}
     * pairs, repartitions to 3 partitions, and prints the partition count
     * before and after.
     *
     * @param args command-line arguments (unused)
     * @throws InterruptedException never thrown currently; retained in the
     *         signature for compatibility (a debug sleep that kept the Spark
     *         UI alive used to live here)
     */
    public static void main(String[] args) throws InterruptedException {
        SparkConf conf = new SparkConf().setMaster("local").setAppName("wordcount1");
        // JavaSparkContext is Closeable; try-with-resources fixes the leak in
        // the original, which never called stop()/close().
        try (JavaSparkContext sc = new JavaSparkContext(conf)) {
            String tb1Path = "file:///d:/caocao/spark-demo/expr/tb1.txt";
            // Each line "key,intValue" -> (key, intValue). Lambda replaces the
            // anonymous PairFunction; the formerly raw `new Tuple2(...)` is
            // now parameterized.
            JavaPairRDD<String, Integer> pairRDD = sc.textFile(tb1Path)
                    .mapToPair((PairFunction<String, String, Integer>) s -> {
                        String[] arr = s.split(",");
                        return new Tuple2<>(arr[0], Integer.valueOf(arr[1]));
                    });
            JavaPairRDD<String, Integer> repartitioned = pairRDD.repartition(3);
            System.out.println(pairRDD.partitions().size());
            System.out.println(repartitioned.partitions().size());
        }
    }

}
