package com.example.demo.controller;


import com.example.demo.componet.ReadCSV;
import com.example.demo.model.SaleJson;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.api.java.function.VoidFunction;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import scala.Tuple2;

import javax.annotation.Resource;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;

@RestController
@RequestMapping("/analyze")
public class SaleTotalController implements Serializable {

    /** Spark context, injected by a Spring configuration bean elsewhere in the project. */
    @Resource
    private JavaSparkContext sc;

    /** Path of the CSV data file (injected as a String bean). */
    @Autowired
    private String getFilePath;

    /**
     * Sums column index 7 ("consumption") of the CSV file with Spark and
     * returns the total wrapped in a {@link SaleJson}.
     *
     * <p>The first entry of the column is assumed to be a header row and is
     * skipped. An empty or header-only file yields a total of 0.0 instead of
     * crashing the endpoint.
     *
     * @return a {@link SaleJson} whose total is the sum of all parsed values,
     *         or 0.0 when the file has no data rows
     * @throws NumberFormatException if a data cell cannot be parsed as a double
     */
    @GetMapping("/consumption")
    public SaleJson getTotal() {
        ReadCSV readCSV = new ReadCSV();
        List<String> columnList = readCSV.getColumnList(getFilePath, 7);

        SaleJson saleJson = new SaleJson();

        // Guard: with no data rows, remove(0) and reduce() below would both
        // throw. Report a zero total instead.
        if (columnList == null || columnList.size() <= 1) {
            saleJson.setTotal(0.0);
            return saleJson;
        }

        // Drop the header row before parsing the numeric cells.
        columnList.remove(0);

        List<Double> values = new ArrayList<>(columnList.size());
        for (String cell : columnList) {
            values.add(Double.parseDouble(cell));
        }

        // Load the values into Spark and sum them in parallel.
        JavaRDD<Double> rdd = sc.parallelize(values);
        Double total = rdd.reduce(Double::sum);

        saleJson.setTotal(total);
        return saleJson;
    }
}
