package com.navinfo.platform.dataanalysis.service.impl;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.navinfo.location.mileage.util.JsonUtil;
import com.navinfo.platform.common.service.ICommonService;
import com.navinfo.platform.common.service.IDataHandleService;
import com.navinfo.platform.common.utils.StatisticsDef;
import com.navinfo.platform.dataanalysis.dto.VehicleBaseStatisticsDay;
import com.navinfo.platform.dataanalysis.dto.VehicleBaseStatisticsWeekOrMonth;
import com.navinfo.platform.dataanalysis.service.IStatisticsWeekOrMonthService;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import scala.Tuple2;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;

/**
 * 周月统计信息处理
 *
 * @author wangshuai
 */
public enum WeekAndMonthStatisticsService implements IDataHandleService {
    // Enum-based singleton (Effective Java item 3).
    INSTANCE;

    /** Delegate performing the actual convert/aggregate steps. */
    private final IStatisticsWeekOrMonthService instance = StatisticsWeekOrMonthService.INSTANCE;

    /**
     * 处理方法: aggregates day-level statistics into week- or month-level
     * statistics and writes the result to HDFS as a single parquet file.
     *
     * @param obj       obj.get(0) must be the day-level {@code Dataset<Row>}
     * @param configMap job configuration: HDFS output path templates, the
     *                  reference day (yyyyMMdd) and the partition type value
     * @param spark     active spark session
     */
    @SuppressWarnings("unchecked")
    @Override
    public void handle(List<Object> obj, Map<String, String> configMap, SparkSession spark) {
        Dataset<Row> data = (Dataset<Row>) obj.get(0);
        JavaPairRDD<Long, List<VehicleBaseStatisticsDay>> pairRDD = instance.convert(data, configMap, spark);
        JavaPairRDD<Long, VehicleBaseStatisticsWeekOrMonth> rdd = instance.handle(pairRDD, configMap, spark);
        // Unwrap the pair values lazily (no partition materialization). The JSON
        // println is debug output to executor stdout, retained from the original
        // implementation; remove once the job is stable.
        JavaRDD<VehicleBaseStatisticsWeekOrMonth> javaRDD = rdd.mapPartitions(
                new FlatMapFunction<Iterator<Tuple2<Long, VehicleBaseStatisticsWeekOrMonth>>, VehicleBaseStatisticsWeekOrMonth>() {
                    @Override
                    public Iterator<VehicleBaseStatisticsWeekOrMonth> call(Iterator<Tuple2<Long, VehicleBaseStatisticsWeekOrMonth>> tuple2Iterator) throws Exception {
                        return new Iterator<VehicleBaseStatisticsWeekOrMonth>() {
                            @Override
                            public boolean hasNext() {
                                return tuple2Iterator.hasNext();
                            }

                            @Override
                            public VehicleBaseStatisticsWeekOrMonth next() {
                                VehicleBaseStatisticsWeekOrMonth weekOrMonth = tuple2Iterator.next()._2;
                                try {
                                    System.out.println(JsonUtil.toJson(weekOrMonth));
                                } catch (JsonProcessingException e) {
                                    // Debug serialization failure must not break the job.
                                    e.printStackTrace();
                                }
                                return weekOrMonth;
                            }
                        };
                    }
                }
        );
        boolean monthly = StatisticsDef.MONTH.equals(configMap.get(ICommonService.PARTITION_NUM_VALUE));
        String savePath = configMap.get(monthly ? "hdfs.statistics.month.path" : "hdfs.statistics.week.path");
        String dateStr = resolvePartitionDate(configMap.get(ICommonService.DAY_VALUE), monthly);
        //保存至hdfs
        Dataset<Row> dataFrame = spark.createDataFrame(javaRDD, VehicleBaseStatisticsWeekOrMonth.class);
        dataFrame.select(VehicleBaseStatisticsWeekOrMonth.monthColumns())
                .repartition(1)
                .write()
                .mode(SaveMode.Overwrite)
                .parquet(String.format(savePath, dateStr));
    }

    /**
     * Normalizes the reference day to the partition date string (yyyyMMdd):
     * the Monday of the ISO week containing {@code day} for weekly jobs, or
     * the first day of the month for monthly jobs.
     *
     * @param day     reference day in yyyyMMdd form
     * @param monthly true for monthly aggregation, false for weekly
     * @return normalized partition date in yyyyMMdd form
     * @throws IllegalArgumentException if {@code day} cannot be parsed
     */
    private static String resolvePartitionDate(String day, boolean monthly) {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd");
        Calendar calendar = Calendar.getInstance();
        try {
            calendar.setTime(sdf.parse(day));
        } catch (ParseException e) {
            // Fail fast instead of silently falling back to "today" as the old
            // code did — a wrong partition date would overwrite the wrong
            // parquet directory.
            throw new IllegalArgumentException("invalid day value: " + day, e);
        }
        if (monthly) {
            calendar.set(Calendar.DAY_OF_MONTH, 1);
        } else {
            // Without this, the locale default (week starting on Sunday) makes
            // DAY_OF_WEEK=MONDAY jump FORWARD to the next Monday for a Sunday
            // input date, landing in the wrong week partition.
            calendar.setFirstDayOfWeek(Calendar.MONDAY);
            calendar.set(Calendar.DAY_OF_WEEK, Calendar.MONDAY);
        }
        return sdf.format(calendar.getTime());
    }
}
