package com.navinfo.platform.insurance.service;

import com.navinfo.platform.common.service.ILoadDataChannel;
import com.navinfo.platform.common.service.ILoadDataService;
import com.navinfo.platform.common.service.impl.LoadDataFromHive;
import com.navinfo.platform.common.utils.DateUtils;
import com.navinfo.platform.common.utils.StatisticsDef;
import org.apache.spark.sql.SparkSession;
import scala.Tuple2;

import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.Collections;
import java.util.List;
import java.util.Map;

/**
 * Loads insurance driving-report data from Hive for a configured month range.
 *
 * <p>Implemented as a single-element enum ({@link #INS}) to guarantee a
 * thread-safe singleton instance of this service.
 */
public enum DrivingReportDataLoadService implements ILoadDataService {
    /** The singleton instance of this data-load service. */
    INS;

    /** Channel used to execute HQL against Hive; shared singleton, never reassigned. */
    private final ILoadDataChannel hiveChannel = LoadDataFromHive.INSTANCE;

    /**
     * Reads driving-report rows from Hive for the configured date range.
     *
     * <p>Required {@code configMap} entries:
     * <ul>
     *   <li>{@code args_1} — start month of the statistics window</li>
     *   <li>{@code args_2} — end month of the statistics window (inclusive)</li>
     *   <li>{@code hive.insurance.db} — Hive database name</li>
     *   <li>{@code hive.insurance.report.sql} — HQL template with four
     *       {@code %s} placeholders, filled as (db, db, startDay, endDay)</li>
     * </ul>
     *
     * @param spark     active Spark session used to run the query
     * @param configMap job configuration; see required entries above
     * @return a singleton list wrapping whatever {@code hiveChannel.readData} returns
     * @throws IllegalStateException if any required config entry is missing
     */
    @Override
    public List<Object> loadData(SparkSession spark, Map<String, String> configMap) {
        String startDay = requireConfig(configMap, "args_1");
        String endDay = requireConfig(configMap, "args_2");
        String db = requireConfig(configMap, "hive.insurance.db");
        String hqlTemplate = requireConfig(configMap, "hive.insurance.report.sql");

        // The template must contain exactly four %s placeholders, consumed in
        // this order: database (twice), start day, end day.
        String hql = String.format(hqlTemplate, db, db, startDay, endDay);
        System.out.println("执行的hql为："+hql);

        return Collections.singletonList(hiveChannel.readData(spark, hql));
    }

    /**
     * Returns the value mapped to {@code key}, failing fast with a descriptive
     * message instead of letting a {@code null} propagate into the HQL template.
     */
    private static String requireConfig(Map<String, String> configMap, String key) {
        String value = configMap.get(key);
        if (value == null) {
            throw new IllegalStateException("Missing required config entry: " + key);
        }
        return value;
    }
}
