package com.navinfo.platform.dataanalysis.service.impl;

import com.navinfo.platform.common.service.ICommonService;
import com.navinfo.platform.common.service.ILoadDataChannel;
import com.navinfo.platform.common.service.ILoadDataService;
import com.navinfo.platform.common.service.impl.LoadDataFromHive;
import com.navinfo.platform.common.service.impl.LoadDataFromMysql;
import com.navinfo.platform.common.utils.StatisticsDef;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.net.URISyntaxException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;

/***
 * 加载按天统计数据
 * @date 2018-12-01
 * @author wangshuai
 */
public enum StatisticsLoadDataService implements ILoadDataService {
    /**
     * Singleton instance.
     */
    INSTANCE;

    static final Logger LOGGER = LoggerFactory.getLogger(StatisticsLoadDataService.class);

    /** Date pattern of the configured day value, e.g. {@code 20181201}. */
    private static final String DAY_PATTERN = "yyyyMMdd";

    private ILoadDataChannel hiveChannel = LoadDataFromHive.INSTANCE;
    /** Reserved for a MySQL-backed loading path; currently unused. */
    private ILoadDataChannel mysqlChannel = LoadDataFromMysql.INSTANCE;

    /**
     * 获取按天统计数据方法 — loads the per-day statistics data for the week or
     * month that contains the configured day.
     *
     * <p>In the {@code local} environment the per-day parquet files are read
     * directly from HDFS; otherwise the formatted HQL is executed via Hive.
     *
     * @param spark     spark session
     * @param configMap 配置文件 — must contain {@code run.env}, the partition type
     *                  and day values, {@code hdfs.statistics.hql} and
     *                  {@code hdfs.statistics.day.path}
     * @return 按天统计数据 — a single-element list holding the loaded
     *         {@code Dataset<Row>}, or {@code null} when running locally and no
     *         matching files exist (historical contract preserved)
     * @throws IllegalArgumentException if the configured day cannot be parsed as
     *                                  {@code yyyyMMdd}
     */
    @Override
    public List<Object> loadData(SparkSession spark, Map<String, String> configMap) {
        String env = configMap.get("run.env");
        String type = configMap.get(ICommonService.PARTITION_NUM_VALUE);
        String day = configMap.get(ICommonService.DAY_VALUE);
        String hql = configMap.get("hdfs.statistics.hql");
        String hqlPath = configMap.get("hdfs.statistics.day.path");

        SimpleDateFormat sdf = new SimpleDateFormat(DAY_PATTERN);
        Calendar calendar = Calendar.getInstance();
        calendar.setTime(parseDay(sdf, day));

        // 确定时间范围 — resolve the inclusive [start, end] range covered by the
        // requested statistics type. When the type is neither WEEK nor MONTH the
        // hql stays unformatted and no paths are resolved (previously this left
        // realPaths null and caused an NPE below in the local environment).
        List<String> realPaths = Collections.emptyList();
        if (StatisticsDef.WEEK.equals(type)) {
            // ISO-style week: Monday through Sunday of the week containing `day`.
            calendar.setFirstDayOfWeek(Calendar.MONDAY);
            calendar.set(Calendar.DAY_OF_WEEK, Calendar.MONDAY);
            String monday = sdf.format(calendar.getTime());
            calendar.set(Calendar.DAY_OF_WEEK, Calendar.SUNDAY);
            String sunday = sdf.format(calendar.getTime());
            hql = String.format(hql, monday, sunday);
            realPaths = getRealPath(monday, sunday, hqlPath);
        } else if (StatisticsDef.MONTH.equals(type)) {
            // First through last day of the month containing `day`.
            calendar.set(Calendar.DAY_OF_MONTH, 1);
            String first = sdf.format(calendar.getTime());
            calendar.set(Calendar.DAY_OF_MONTH, calendar.getActualMaximum(Calendar.DAY_OF_MONTH));
            String last = sdf.format(calendar.getTime());
            hql = String.format(hql, first, last);
            realPaths = getRealPath(first, last, hqlPath);
        }
        LOGGER.info("statistics hql: {}", hql);

        Dataset<Row> statisticsDataSet;
        if (StringUtils.equals(env, "local")) {
            if (realPaths.isEmpty()) {
                // Preserve the historical contract: callers treat null as "no data".
                return null;
            }
            // 获取按天统计数据 — read the per-day parquet files directly.
            statisticsDataSet = spark.read().parquet(realPaths.toArray(new String[0]));
        } else {
            statisticsDataSet = hiveChannel.readData(spark, hql);
        }
        return Arrays.asList(statisticsDataSet);
    }

    /**
     * Parses a {@code yyyyMMdd} day string, failing fast on bad configuration
     * instead of silently falling back to "today" as the old code did.
     *
     * @throws IllegalArgumentException if {@code day} does not match the pattern
     */
    private static Date parseDay(SimpleDateFormat sdf, String day) {
        try {
            return sdf.parse(day);
        } catch (ParseException e) {
            throw new IllegalArgumentException("invalid day value: " + day, e);
        }
    }

    /**
     * 获取要查询的文件路径集合 — resolves the HDFS paths that exist for every day
     * in the inclusive range {@code [start, end]}.
     *
     * @param start    开始日期（如：20181128）
     * @param end      结束日期（如：20181130），inclusive
     * @param basePath hdfs地址 — path template with one {@code %s} placeholder
     *                 that receives the {@code yyyyMMdd} day
     * @return 路径集合 — the existing paths, possibly empty; never {@code null}
     * @throws IllegalArgumentException if either bound cannot be parsed as
     *                                  {@code yyyyMMdd} (the old code printed a
     *                                  stack trace and then NPE'd anyway)
     */
    public static List<String> getRealPath(String start, String end, String basePath) {
        List<String> list = new ArrayList<>(31);
        SimpleDateFormat sdf = new SimpleDateFormat(DAY_PATTERN);
        Date startDate;
        Date endDate;
        try {
            startDate = sdf.parse(start);
            endDate = sdf.parse(end);
        } catch (ParseException e) {
            throw new IllegalArgumentException("invalid date range: " + start + " - " + end, e);
        }
        Calendar ca = Calendar.getInstance();
        Date curDate = startDate;
        while (curDate.compareTo(endDate) <= 0) {
            ca.setTime(curDate);
            try {
                String dataPathStr = String.format(basePath, sdf.format(ca.getTime()));
                // FileSystem.get returns a cached, shared instance — do not close it here.
                FileSystem fs = FileSystem.get(new java.net.URI(dataPathStr), new org.apache.hadoop.conf.Configuration());
                if (fs.exists(new Path(dataPathStr))) {
                    list.add(dataPathStr);
                } else {
                    LOGGER.info("{} is not exist", dataPathStr);
                }
            } catch (IOException | URISyntaxException e) {
                // Skip this day but keep scanning the rest of the range.
                LOGGER.error("failed to probe statistics path for day {}", sdf.format(ca.getTime()), e);
            }
            ca.add(Calendar.DATE, 1);
            curDate = ca.getTime();
        }
        return list;
    }
}
