package spark.Task1;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.PairFunction;
import scala.Tuple2;

public class Task1_2 {
    // Default input/output locations, used when no CLI arguments are supplied.
    // Kept identical to the original hard-coded paths for backward compatibility.
    private static final String DEFAULT_INPUT = "/home/jinhaitao/stock_small.csv";
    private static final String DEFAULT_OUTPUT = "/home/jinhaitao/result/Task1_2";

    /**
     * Reads a stock CSV, computes the difference {@code column[6] - column[3]}
     * for every row, and writes the ten rows with the largest difference
     * (descending) to the output directory as text.
     *
     * @param args optional: {@code args[0]} = input CSV path,
     *             {@code args[1]} = output directory; defaults are used when absent
     */
    public static void main(String[] args) {
        String inputPath = args.length > 0 ? args[0] : DEFAULT_INPUT;
        String outputPath = args.length > 1 ? args[1] : DEFAULT_OUTPUT;

        SparkConf conf = new SparkConf().setAppName("Task1").setMaster("local");
        // try-with-resources stops/closes the SparkContext automatically,
        // so no explicit stop() call is needed.
        try (JavaSparkContext sc = new JavaSparkContext(conf)) {
            // Read the CSV file, one line per record.
            JavaRDD<String> lines = sc.textFile(inputPath);

            // Map each line to (key, diff): the key concatenates the five columns
            // required in the output, the value is column[6] - column[3].
            // NOTE(review): assumes no header row and that columns 3 and 6 are
            // always numeric — a header line would throw NumberFormatException;
            // confirm against the actual input file.
            JavaPairRDD<String, Float> keyValuePair = lines.mapToPair(line -> {
                String[] columns = line.split(",");
                String key = columns[0] + " " + columns[1] + " " + columns[2]
                        + " " + columns[6] + " " + columns[3];
                Float value = Float.parseFloat(columns[6]) - Float.parseFloat(columns[3]);
                return new Tuple2<>(key, value);
            });

            // Sort by the difference in descending order: swap key and value so
            // the float becomes the sort key, sortByKey(false), then swap back.
            JavaPairRDD<String, Float> sorted = keyValuePair
                    .mapToPair(row -> new Tuple2<>(row._2, row._1))
                    .sortByKey(false)
                    .mapToPair(row -> new Tuple2<>(row._2, row._1));

            // Keep only the ten largest differences and write them out.
            JavaRDD<Tuple2<String, Float>> result = sc.parallelize(sorted.take(10));
            result.saveAsTextFile(outputPath);
        }
    }
}
