package edu.nju.cloud;

import com.alibaba.fastjson2.JSON;
import edu.nju.cloud.entity.Root;
import org.apache.commons.lang3.StringUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.PairFunction;
import scala.Tuple2;

import java.text.SimpleDateFormat;
import java.util.Iterator;

public class Application {
    /**
     * Entry point. Reads text files whose lines are JSON arrays of {@link Root}
     * repository records, keeps records with a non-blank language, groups them by
     * creation year (ascending), and prints one CSV line per record to stdout.
     *
     * @param args command-line arguments (unused)
     */
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("Application").setMaster("local");
        // try-with-resources guarantees the Spark context is stopped even on failure
        try (JavaSparkContext sc = new JavaSparkContext(conf)) {
            String input = "file:///**";  // placeholder path — supply the real dataset location
            JavaRDD<String> lines = sc.textFile(input);

            // Each input line is a JSON array of Root records: flatten the arrays,
            // then drop records whose language field is null/empty/blank.
            JavaRDD<Root> repos = lines
                    .flatMap(s -> JSON.parseArray(s, Root.class).iterator())
                    .filter(r -> !StringUtils.isBlank(r.getLanguage()));

            // Key each record by its creation year. Date#getYear() is deprecated and
            // returns (year - 1900), so add 1900 to recover the actual calendar year.
            JavaPairRDD<Integer, Root> byYear = repos.mapToPair(
                    root -> new Tuple2<>(root.getCreated_at().getYear() + 1900, root));

            // Collect the grouped, year-sorted data and print on the driver.
            // Printing inside a transformation would run on executors, where the
            // output order is not guaranteed; it also avoids shipping the
            // (non-thread-safe) SimpleDateFormat into the closure.
            SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
            for (Tuple2<Integer, Iterable<Root>> yearGroup : byYear.groupByKey().sortByKey(true).collect()) {
                for (Root r : yearGroup._2) {
                    System.out.printf("%s,%s,%s,%d,%d,%d,%d,%s,%s,%s\n",
                            r.getId(), r.getLanguage(), r.getFull_name(), r.getForks_count(),
                            r.getStargazers_count(), r.getWatchers_count(), r.getOpen_issues_count(),
                            r.isHas_issues(), r.isHas_wiki(), sdf.format(r.getCreated_at()));
                }
            }
        }
    }
}
