package com.dukk.espark.apps;

import com.alibaba.fastjson.JSONObject;
import com.dukk.espark.core.Engine;
import com.dukk.espark.core.SparkBase;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFlatMapFunction;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.rdd.RDD;
import org.apache.spark.sql.Dataset;
import scala.Tuple2;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;


@Engine(name = "TopXApp")
public class TopXApp extends SparkBase {

    /** Input file read when {@code params} supplies no {@code "inputPath"}. */
    private static final String DEFAULT_INPUT_PATH = "G:\\logs\\lllll.txt";

    /** Number of partitions used when reading the input file. */
    private static final int NUM_PARTITIONS = 5;

    /** Number of top entries reported when {@code params} supplies no {@code "topN"}. */
    private static final int DEFAULT_TOP_N = 10;

    /**
     * Reads a text file of numeric lines, counts occurrences of each value,
     * and prints the top-N values ranked by occurrence count.
     *
     * <p>Optional keys in {@code params} (falls back to defaults when absent
     * or when {@code params} is null):
     * <ul>
     *   <li>{@code "inputPath"} — path of the input text file
     *       (default {@value #DEFAULT_INPUT_PATH})</li>
     *   <li>{@code "topN"} — how many entries to print
     *       (default {@value #DEFAULT_TOP_N})</li>
     * </ul>
     *
     * @param params optional job configuration; previously ignored, now consulted
     *               for the keys above (backward-compatible: absent keys reproduce
     *               the original hard-coded behavior)
     */
    @Override
    public void doAi(JSONObject params) {
        final String inputPath = (params != null && params.containsKey("inputPath"))
                ? params.getString("inputPath")
                : DEFAULT_INPUT_PATH;
        final int topN = (params != null && params.containsKey("topN"))
                ? params.getIntValue("topN")
                : DEFAULT_TOP_N;

        // javaSparkContext is inherited from SparkBase.
        JavaRDD<String> lines = javaSparkContext.textFile(inputPath, NUM_PARTITIONS);

        // Each numeric line becomes (value, 1) so counts can be summed per value.
        JavaPairRDD<Double, Integer> valueOnes = lines.mapToPair(
                (PairFunction<String, Double, Integer>) line ->
                        new Tuple2<>(Double.parseDouble(line), 1));

        // Sum the per-value counts.
        JavaPairRDD<Double, Integer> countsByValue =
                valueOnes.reduceByKey((Function2<Integer, Integer, Integer>) Integer::sum);

        // Take the topN pairs ranked by count. The comparator must be Serializable
        // because Spark ships it to executors; the intersection cast provides that.
        List<Tuple2<Double, Integer>> topEntries = countsByValue.top(
                topN,
                (Comparator<Tuple2<Double, Integer>> & Serializable)
                        (a, b) -> Integer.compare(a._2(), b._2()));

        for (Tuple2<Double, Integer> entry : topEntries) {
            System.out.println("key:" + entry._1() + " value:" + entry._2());
        }
    }
}
