package com.navinfo.platform.insurance.service;

import com.navinfo.platform.common.service.ICommonService;
import com.navinfo.platform.common.service.ILoadDataChannel;
import com.navinfo.platform.common.service.ILoadDataService;
import com.navinfo.platform.common.service.impl.LoadDataFromHive;
import com.navinfo.platform.common.utils.DateUtils;
import com.navinfo.platform.common.utils.StatisticsDef;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import scala.Tuple2;

import java.util.Collections;
import java.util.List;
import java.util.Map;

/**
 * Singleton (enum-based) service that loads driving/insurance info from Hive
 * for a given statistics period.
 *
 * <p>Thread-safety: the single {@code INS} instance is stateless apart from the
 * immutable {@link #hiveChannel} reference, so concurrent use is safe as long as
 * the underlying channel is.
 */
public enum DrivingInfoDataLoadService implements ILoadDataService {
    INS;

    /** Channel used to read data from Hive; fixed to the Hive implementation. */
    private final ILoadDataChannel hiveChannel = LoadDataFromHive.INSTANCE;

    /**
     * Loads insurance driving-info rows from Hive for the month containing the
     * configured day.
     *
     * @param spark     active Spark session used to run the query
     * @param configMap configuration; must contain {@code ICommonService.DAY_VALUE}
     *                  (the reference day), {@code hive.insurance.info.sql}
     *                  (a format string with three {@code %s} placeholders:
     *                  database name, period start, period end) and
     *                  {@code hive.insurance.db} (the Hive database name)
     * @return a singleton list whose only element is the {@link Dataset} of
     *         {@link Row} returned by the Hive channel
     */
    @Override
    public List<Object> loadData(SparkSession spark, Map<String, String> configMap) {
        // Reference day supplied by the caller via configuration.
        String day = configMap.get(ICommonService.DAY_VALUE);
        // NOTE(review): presumably (_1, _2) are the first/last day of the month
        // containing `day` — confirm against DateUtils.getStartEndDay.
        Tuple2<String, String> startEndDay = DateUtils.getStartEndDay(day, StatisticsDef.MONTH);

        // Build the HQL by substituting database name and period bounds into the
        // configured SQL template.
        String hql = String.format(configMap.get("hive.insurance.info.sql"), configMap.get("hive.insurance.db"), startEndDay._1, startEndDay._2);
        // NOTE(review): stdout debug print kept because no logging facade is
        // imported in this file; consider switching to SLF4J if available.
        System.out.println("执行的hql为：" + hql);

        return Collections.singletonList(hiveChannel.readData(spark, hql));
    }
}
