import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import scala.Serializable;
import scala.Tuple2;


/**
 * Spark driver that reads a tab-separated nginx access log from HDFS,
 * keys each record by device id, and aggregates per-device traffic totals.
 *
 * Expected input line format (tab-separated, see {@link #rddToPair}):
 *   timestamp \t deviceId \t upTraffic \t downTraffic
 */
public class KickRDD implements Serializable {

    public static void main(String[] args) {
        System.out.println("read from hadoopmaster");
        SparkConf conf = new SparkConf()
                .setAppName("kickstartlog")
                .setMaster("spark://sparkmaster:7077");

        // try-with-resources: JavaSparkContext is AutoCloseable, so the context
        // is stopped even when an action below throws (the original only closed
        // it on the happy path).
        try (JavaSparkContext context = new JavaSparkContext(conf)) {
            JavaRDD<String> rdd = context.textFile("hdfs://hadoopmaster:9000/user/flume/nginxlog/2018-16-10.1521195521934.nginxlog");
            System.out.println("rdd is:" + rdd);
            System.out.println("count:" + rdd.count());

            JavaPairRDD<String, AccessLogInfo> infoJavaPairRDD = rddToPair(rdd);

            JavaPairRDD<String, AccessLogInfo> aggregaterdd = aggregate(infoJavaPairRDD);
            System.out.println("aggregaterdd is:" + aggregaterdd);
            System.out.println("aggregaterdd count:" + aggregaterdd.count());
        }
    }

    /**
     * Parses each raw log line into a (deviceId, AccessLogInfo) pair.
     *
     * Field layout per line (tab-separated):
     *   [0] timestamp, [1] deviceId, [2] upTraffic, [3] downTraffic
     *
     * @param rdd raw log lines
     * @return pair RDD keyed by device id
     */
    private static JavaPairRDD<String, AccessLogInfo> rddToPair(JavaRDD<String> rdd) {
        return rdd.mapToPair(new PairFunction<String, String, AccessLogInfo>() {
            @Override
            public Tuple2<String, AccessLogInfo> call(String s) throws Exception {
                // NOTE(review): a malformed line (fewer than 4 fields or a
                // non-numeric field) will fail the whole task with
                // ArrayIndexOutOfBounds/NumberFormatException — confirm input
                // is always well-formed or pre-filter upstream.
                String[] oneline = s.split("\t");
                Long timestamp = Long.valueOf(oneline[0]);
                Long uptraffic = Long.valueOf(oneline[2]);
                Long downtraffic = Long.valueOf(oneline[3]);
                String deviceid = oneline[1];
                return new Tuple2<String, AccessLogInfo>(deviceid, new AccessLogInfo(timestamp, uptraffic, downtraffic));
            }
        });
    }

    /**
     * Reduces all records of the same device id into a single AccessLogInfo
     * by summing every field.
     *
     * NOTE(review): summing the two timestamps looks suspicious — aggregations
     * of this shape usually keep the min (first access) or max (last access)
     * timestamp rather than their sum. Behavior preserved; confirm intent.
     *
     * @param pair per-record (deviceId, AccessLogInfo) pairs
     * @return one aggregated AccessLogInfo per device id
     */
    private static JavaPairRDD<String, AccessLogInfo> aggregate(JavaPairRDD<String, AccessLogInfo> pair) {
        return pair.reduceByKey(new Function2<AccessLogInfo, AccessLogInfo, AccessLogInfo>() {
            @Override
            public AccessLogInfo call(AccessLogInfo accessLogInfo, AccessLogInfo accessLogInfo2) throws Exception {
                return new AccessLogInfo(
                        accessLogInfo.getTimestamp() + accessLogInfo2.getTimestamp(),
                        accessLogInfo.getUpTraffic() + accessLogInfo2.getUpTraffic(),
                        accessLogInfo.getDownTraffic() + accessLogInfo2.getDownTraffic());
            }
        });
    }

}
