package com.navinfo.platform.basicdatastatistics.service.week;

import com.navinfo.platform.common.service.ICommonService;
import com.navinfo.platform.common.service.ILoadDataChannel;
import com.navinfo.platform.common.service.ILoadDataService;
import com.navinfo.platform.common.service.impl.LoadDataFromHive;
import com.navinfo.platform.common.utils.DateUtils;
import com.navinfo.platform.common.utils.StatisticsDef;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import scala.Tuple2;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;

public enum BasicDataWeekLoadDataService implements ILoadDataService {
    // Enum-based singleton: INS is the only instance of this service.
    INS;

    // Channel used to read datasets from Hive; shared by all callers of the singleton.
    private final ILoadDataChannel hiveChannel = LoadDataFromHive.INSTANCE;

    /**
     * Loads the two weekly datasets (basic data and extended data) from Hive.
     *
     * <p>The reference day is taken from {@code configMap} under
     * {@link ICommonService#DAY_VALUE} and expanded to the start/end days of the
     * containing week via {@link DateUtils#getStartEndDay}. Each HQL template from
     * the config is formatted with that (start, end) pair before being executed.
     *
     * @param spark     active Spark session used to run the HQL queries
     * @param configMap configuration; must contain {@code ICommonService.DAY_VALUE},
     *                  {@code hdfs.data.week.hql} and {@code hdfs.data.ex.week.hql}
     * @return a two-element list: index 0 is the basic-data {@code Dataset<Row>},
     *         index 1 is the extended-data {@code Dataset<Row>}
     * @throws NullPointerException if either HQL template key is absent from the config
     */
    @Override
    public List<Object> loadData(SparkSession spark, Map<String, String> configMap) {
        String day = configMap.get(ICommonService.DAY_VALUE);
        // (start day, end day) of the week containing `day`.
        Tuple2<String, String> week = DateUtils.getStartEndDay(day, StatisticsDef.WEEK);

        Dataset<Row> basicDataInfo = readWeekRangedData(spark, configMap, "hdfs.data.week.hql", week);
        Dataset<Row> exDataInfo = readWeekRangedData(spark, configMap, "hdfs.data.ex.week.hql", week);

        List<Object> list = new ArrayList<>(2);
        list.add(basicDataInfo);
        list.add(exDataInfo);
        return list;
    }

    /**
     * Reads one dataset using the HQL template stored under {@code hqlKey},
     * formatted with the week's start and end days.
     *
     * <p>Fails fast with a descriptive {@link NullPointerException} when the
     * template is missing, instead of the opaque NPE {@code String.format}
     * would otherwise throw on a null format string.
     */
    private Dataset<Row> readWeekRangedData(SparkSession spark, Map<String, String> configMap,
                                            String hqlKey, Tuple2<String, String> week) {
        String template = Objects.requireNonNull(
                configMap.get(hqlKey), "Missing HQL template for config key: " + hqlKey);
        String hql = String.format(template, week._1(), week._2());
        return hiveChannel.readData(spark, hql);
    }
}
