package com.youbu.demo;

import java.text.SimpleDateFormat;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.storage.StorageLevel;

import scala.Tuple2;


/**
 * Points-aggregation batch job over an HDFS text file.
 * <p>
 * Input format: one record per line, {@code "userId,points"}. The job sums the
 * points per user, sorts users by their total (ascending), and writes the
 * result to a timestamped subdirectory of the output path.
 * <p>
 * Usage: {@code IntegralCountDemo <inputPath> <outputPath>}
 *
 * @author sunfangwei
 **/
public class IntegralCountDemo {

	public static void main(String[] args) {
		// Fail fast with a usage message instead of an ArrayIndexOutOfBoundsException.
		if (args.length < 2) {
			System.err.println("Usage: IntegralCountDemo <inputPath> <outputPath>");
			System.exit(1);
		}
		String filePath = args[0];
		String saveFilePath = args[1];
		// Application name shown in the Spark UI.
		SparkConf conf = new SparkConf().setAppName("jfcountx");
		conf.set("spark.network.timeout", "300");
		// Default number of tasks per stage (shuffle parallelism).
		conf.set("spark.default.parallelism", "6");
		// Kryo is faster and more compact than default Java serialization.
		conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
		// try-with-resources guarantees the context is closed even if the job fails
		// (the original leaked the context on any exception before sc.close()).
		try (JavaSparkContext sc = new JavaSparkContext(conf)) {
			JavaRDD<String> input = sc.textFile(filePath);
			// Parse each non-blank line into (userId, points).
			JavaPairRDD<String, Integer> userPoints = input
					.filter(line -> !line.trim().isEmpty())
					.mapPartitionsToPair(lines -> {
						List<Tuple2<String, Integer>> parsed = new ArrayList<>();
						while (lines.hasNext()) {
							String[] fields = lines.next().split(",");
							// trim() so records with stray spaces ("u1, 42") still parse;
							// a malformed record still fails loudly via NumberFormatException.
							parsed.add(new Tuple2<>(fields[0].trim(), Integer.parseInt(fields[1].trim())));
						}
						return parsed.iterator();
					})
					.persist(StorageLevel.MEMORY_AND_DISK_SER());
			// Sum points for each user.
			JavaPairRDD<String, Integer> totals = userPoints
					.reduceByKey(Integer::sum)
					.persist(StorageLevel.MEMORY_AND_DISK_SER());
			// Swap to (points, userId) so sortByKey orders by total points (ascending).
			JavaPairRDD<Integer, String> sorted = totals.mapPartitionsToPair(pairs -> {
				List<Tuple2<Integer, String>> swapped = new ArrayList<>();
				while (pairs.hasNext()) {
					Tuple2<String, Integer> pair = pairs.next();
					swapped.add(new Tuple2<>(pair._2, pair._1));
				}
				return swapped.iterator();
			}).sortByKey();
			// Timestamped output directory; java.time replaces the legacy (and
			// thread-unsafe) SimpleDateFormat/Date pair.
			String stamp = LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyyMMddHHmmss"));
			// NOTE(review): coalesce can only REDUCE the partition count — if the sorted
			// RDD has fewer than 100 partitions this is a no-op; use repartition(100)
			// if 100 output files are actually required.
			sorted.coalesce(100).saveAsTextFile(saveFilePath + "/" + stamp);
		}
	}
}
