package com.sheep.spark.init;

import com.sheep.spark.util.ConfSupport;
import com.sheep.spark.util.SparkSupport;
import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import scala.Tuple2;

import java.io.Serializable;
import java.util.Arrays;
import java.util.List;
import java.util.regex.Pattern;

/**
 * One-shot Spark job that word-counts the configured init file on HDFS and
 * logs every (word, count) pair at INFO level.
 *
 * <p>Serializable because the anonymous function classes below capture the
 * enclosing instance and are shipped to Spark executors.
 */
public class InitWordCountJob implements Serializable {
    private static final Logger logger = Logger.getLogger(InitWordCountJob.class);

    /** Single-space tokenizer; compiled once since Pattern compilation is costly. */
    private static final Pattern SPACE = Pattern.compile(" ");

    /**
     * Runs the word-count job immediately on construction.
     *
     * <p>NOTE(review): doing real work in a constructor makes this class hard
     * to test and reuse; kept as-is for backward compatibility with existing
     * callers that rely on {@code new InitWordCountJob()} triggering the run.
     */
    public InitWordCountJob() {
        run();
    }

    /**
     * Reads the init file from HDFS (path from {@link ConfSupport}), counts
     * word occurrences via a classic map/reduceByKey pipeline, and logs each
     * resulting (word, count) pair.
     *
     * <p>Any failure is logged and swallowed so construction never throws.
     */
    private void run() {
        try {
            String fp = ConfSupport.getInitHDFSFile();
            JavaSparkContext cxt = SparkSupport.getJavaSparkContext();
            JavaRDD<String> lines = cxt.textFile(fp, 1);

            // Tokenize each line on single spaces.
            JavaRDD<String> words = lines.flatMap(new FlatMapFunction<String, String>() {
                @Override
                public Iterable<String> call(String s) {
                    return Arrays.asList(SPACE.split(s));
                }
            });

            // Map every word to a (word, 1) pair ...
            JavaPairRDD<String, Integer> ones = words.mapToPair(
                    new PairFunction<String, String, Integer>() {
                        @Override
                        public Tuple2<String, Integer> call(String s) {
                            return new Tuple2<String, Integer>(s, 1);
                        }
                    });

            // ... then sum the 1s per distinct word.
            JavaPairRDD<String, Integer> counts = ones.reduceByKey(
                    new Function2<Integer, Integer, Integer>() {
                        @Override
                        public Integer call(Integer i1, Integer i2) {
                            return i1 + i2;
                        }
                    });

            // collect() pulls the full result set to the driver; presumably the
            // init file is small — revisit if input size grows.
            List<Tuple2<String, Integer>> output = counts.collect();
            for (Tuple2<?, ?> tuple : output) {
                logger.info("init word count:" + tuple._1() + ": " + tuple._2());
            }
        } catch (Exception e) {
            // Log a message plus the full stack trace; the previous
            // logger.error(e) rendered only the exception and lost context.
            logger.error("init word count job failed", e);
        }
    }
}
