package com.navinfo.platform.dataanalysis.service.impl;

import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.navinfo.platform.common.service.ICommonService;
import com.navinfo.platform.common.service.ILoadDataChannel;
import com.navinfo.platform.common.service.ILoadDataService;
import com.navinfo.platform.common.service.impl.LoadDataFromHdfs;
import com.navinfo.platform.common.service.impl.LoadDataFromHive;
import com.navinfo.platform.common.utils.DateUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import scala.Tuple2;

import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * 加载事件日统计信息
 * @author  web 20191203
 */
/**
 * Loads daily event statistics, either from Hive (cluster run) or from
 * day-partitioned files on HDFS (local run), and returns the result as a
 * single-element list containing the Dataset.
 */
public class EventDataLoadService implements ILoadDataService {
    private ILoadDataChannel hiveChannel = LoadDataFromHive.INSTANCE;
    private ILoadDataChannel hdfsChannel = LoadDataFromHdfs.INSTANCE;

    /**
     * Loads event statistics for the day range derived from the configured
     * day/type arguments.
     *
     * @param spark     active Spark session used to run SQL / read files
     * @param configMap job configuration; reads "args_1" (day, yyyyMMdd),
     *                  "args_2" (period type), "hive.event.hql" (query template
     *                  with two %s slots for start/end day), "run.env"
     *                  ("local" switches to HDFS file loading) and
     *                  "hive.event.path" (path template with one %s day slot)
     * @return a single-element list holding the loaded Dataset&lt;Row&gt;;
     *         the element may be null when nothing could be loaded
     */
    @Override
    public List loadData(SparkSession spark, Map<String, String> configMap) {
        String day = configMap.get("args_1");
        String type = configMap.get("args_2");
        String hql = configMap.get("hive.event.hql");

        // Resolve the [start, end] day window and substitute it into the query template.
        Tuple2<String, String> startEndDay = DateUtils.getStartEndDay(day, type);
        hql = String.format(hql, startEndDay._1, startEndDay._2);
        System.out.println("查询事件日统计信息的day="+ day +", type="+ type +", hql=" +hql);

        Dataset<Row> dataSet = null;
        long start = System.currentTimeMillis();
        if ("local".equals(configMap.get("run.env"))) {
            String eventPath = configMap.get("hive.event.path");
            DateTimeFormatter df = DateTimeFormatter.ofPattern("yyyyMMdd");
            LocalDate startDate = LocalDate.parse(startEndDay._1, df);
            // NOTE(review): end of range is parsed from the raw "day" argument, not
            // startEndDay._2 — confirm these are always equal for every period type.
            LocalDate endDate = LocalDate.parse(day, df);

            List<Dataset<Row>> rowList = new ArrayList<>();
            for(int i=0; !startDate.plusDays(i).isAfter(endDate); i++){
                try {
                    // Read the data file of each daily partition.
                    rowList.add(hdfsChannel.readData(spark, String.format(eventPath, df.format(startDate.plusDays(i)))));
                }catch (Exception e){
                    // Best-effort: a missing/unreadable partition is logged and skipped.
                    System.out.println("加载"+String.format(eventPath, df.format(startDate.plusDays(i)))+"数据失败...");
                }
            }
            if(rowList.size() > 0){
                Dataset<Row> rowDataset = rowList.get(0);
                for(int i=1; i<rowList.size(); i++){
                    // BUG FIX: Dataset.union returns a new Dataset; the original code
                    // discarded the result, so only the first day's data was used.
                    rowDataset = rowDataset.union(rowList.get(i));
                }
                // Extract the table name between "from" and "where" in the HQL and
                // register the unioned data under it, then run the query without the
                // date predicate (partitions were already filtered by path above).
                // NOTE(review): assumes the HQL contains lowercase "from"/"where".
                rowDataset.createOrReplaceTempView(hql.substring(hql.indexOf("from") + 4, hql.indexOf("where")).trim());
                dataSet = spark.sql(hql.substring(0, hql.indexOf("where")));
            }
        } else {
            // Cluster run: let Hive execute the fully-formatted query directly.
            dataSet = hiveChannel.readData(spark, hql);
        }
        System.out.println("加载事件日统计信息，耗时"+ (System.currentTimeMillis()-start));
        return Arrays.asList(dataSet);
    }
}
