package com.me.bigdata;

import com.google.common.collect.Lists;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import scala.Tuple2;

import java.net.MalformedURLException;
import java.net.URL;
import java.util.List;

/**
 * @Desc:
 * @DateTime: 2017/10/24 14:44
 * @Author chen.yihua
 * @Version 1.0
 */
public class JavaSubjectTeacherCount {

    /**
     * Spark job: reads a text file of course URLs (one per line, e.g.
     * {@code http://subject.example.com/teacher}), counts occurrences of each
     * (subject, teacher) pair, then groups by subject with each subject's
     * teacher counts sorted in descending order, and writes the result out.
     *
     * @param args args[0] = input path of URL lines; args[1] = output path
     */
    public static void main(String[] args) {
        if (args.length < 2) {
            System.err.println("Usage: JavaSubjectTeacherCount <inputPath> <outputPath>");
            System.exit(1);
        }
        JavaSparkContext sc = new JavaSparkContext("local[*]", "JavaSubjectTeacherCount");
        try {
            JavaRDD<String> inputData = sc.textFile(args[0]);

            // Parse each line into (subject, teacher). new URL(...) throws the
            // checked MalformedURLException, which must be handled inside the
            // lambda (the original code did not compile without this).
            JavaRDD<Tuple2<String, String>> subjectAndTeacher = inputData.map(line -> {
                String host;
                try {
                    host = new URL(line).getHost();
                } catch (MalformedURLException e) {
                    // Preserve the cause and identify the offending record.
                    throw new IllegalArgumentException("Malformed URL line: " + line, e);
                }
                // Subject is the leading host label; tolerate dot-less hosts
                // instead of throwing StringIndexOutOfBoundsException.
                int dot = host.indexOf('.');
                String subject = dot >= 0 ? host.substring(0, dot) : host;
                // Teacher is the last path segment.
                String teacher = line.substring(line.lastIndexOf('/') + 1);
                return new Tuple2<>(subject, teacher);
            });

            // ((subject, teacher), 1) -> reduce to per-pair counts.
            JavaPairRDD<Tuple2<String, String>, Integer> pairCounts =
                    subjectAndTeacher.mapToPair(pair -> new Tuple2<>(pair, 1));
            JavaPairRDD<Tuple2<String, String>, Integer> reducedCounts =
                    pairCounts.reduceByKey(Integer::sum);

            // Group counted pairs by subject, then sort each subject's teachers
            // by count descending. Integer.compare avoids the overflow risk of
            // the subtraction idiom (b._2 - a._2).
            JavaPairRDD<String, Iterable<Tuple2<Tuple2<String, String>, Integer>>> grouped =
                    reducedCounts.groupBy(entry -> entry._1._1);
            JavaPairRDD<String, List<Tuple2<Tuple2<String, String>, Integer>>> sorted =
                    grouped.mapValues(it -> {
                        List<Tuple2<Tuple2<String, String>, Integer>> list = Lists.newArrayList(it);
                        list.sort((a, b) -> Integer.compare(b._2, a._2));
                        return list;
                    });

            sorted.saveAsTextFile(args[1]);
        } finally {
            // Always release Spark resources, even if the job fails.
            sc.stop();
        }
    }
}
