package com.navinfo.platform.wbs.service;

import com.navinfo.platform.common.service.ICommonService;
import com.navinfo.platform.common.service.ILoadDataChannel;
import com.navinfo.platform.common.service.ILoadDataService;
import com.navinfo.platform.common.service.impl.LoadDataFromHive;
import com.navinfo.platform.common.utils.DateUtils;
import com.navinfo.platform.common.utils.StatisticsDef;
import com.navinfo.platform.wbs.dto.PropertiesConstant;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.joda.time.DateTime;
import scala.Tuple2;

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Objects;

/**
 * Singleton service that loads the month-granularity "operation status" data:
 * one dataset produced by an HQL query over Hive, plus two CSV datasets read
 * from configured paths. Enum-based singleton (thread-safe by construction).
 */
public enum OperationStatusLoadMonthDataService implements ILoadDataService {
    INSTANCE;

    /** Channel used to execute HQL against Hive; never reassigned. */
    private final ILoadDataChannel hiveChannel = LoadDataFromHive.INSTANCE;

    /**
     * Loads the monthly operation-status datasets.
     *
     * @param spark     active Spark session used for both the Hive query and CSV reads
     * @param configMap configuration; must contain {@code DAY_VALUE}, the month-HQL
     *                  template, and the two TID CSV paths
     * @return a three-element list: [Hive query result, TID-357/375 CSV data, TID-500 CSV data]
     * @throws NullPointerException  if the month-HQL template is missing from {@code configMap}
     * @throws NumberFormatException if the computed month bounds are not numeric
     */
    @Override
    public List<Object> loadData(SparkSession spark, Map<String, String> configMap) {
        String day = configMap.get(ICommonService.DAY_VALUE);
        // First/last day of the month containing `day`, as (start, end) strings.
        Tuple2<String, String> dateTuple = DateUtils.getStartEndDay(day, StatisticsDef.MONTH);
        // Fail fast with the offending key name instead of an opaque NPE inside String.format.
        String monthHql = Objects.requireNonNull(
                configMap.get(PropertiesConstant.OPERATION_STATUA_MONTH_HQL),
                "configMap missing required key: OPERATION_STATUA_MONTH_HQL");
        // The HQL template takes the month bounds as integer format arguments.
        String hql = String.format(monthHql, Integer.parseInt(dateTuple._1()), Integer.parseInt(dateTuple._2()));
        Dataset<Row> dataset = hiveChannel.readData(spark, hql);
        // NOTE(review): local is named "375" but the config constant is TID_357_PATH —
        // confirm which number is correct; the constant is declared outside this file.
        String tid375Path = configMap.get(PropertiesConstant.TID_357_PATH);
        String tid500Path = configMap.get(PropertiesConstant.TID_500_PATH);
        Dataset<Row> tid375Data = spark.read().csv(tid375Path);
        Dataset<Row> tid500Data = spark.read().csv(tid500Path);
        return Arrays.asList(dataset, tid375Data, tid500Data);
    }
}
