package com.navinfo.platform.wbs.service;

import com.navinfo.platform.common.service.ICommonService;
import com.navinfo.platform.common.service.ILoadDataChannel;
import com.navinfo.platform.common.service.ILoadDataService;
import com.navinfo.platform.common.service.impl.LoadDataFromHdfs;
import com.navinfo.platform.common.service.impl.LoadDataFromHive;
import com.navinfo.platform.wbs.dto.PropertiesConstant;
import org.apache.commons.lang.StringUtils;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;

/**
 * 读取所需数据
 */
/**
 * Loads the "0200" location dataset required by the operation-status job.
 *
 * <p>Enum-based singleton; obtain via {@link #INSTANCE}. In the {@code local}
 * environment the data is read from raw HDFS files and queried through a temp
 * view; otherwise the HQL is executed directly against Hive.
 */
public enum OperationStatusLoadDataService implements ILoadDataService {
    INSTANCE;

    private final ILoadDataChannel hiveChannel = LoadDataFromHive.INSTANCE;
    private final ILoadDataChannel hdfsChannel = LoadDataFromHdfs.INSTANCE;

    /**
     * Reads the 0200 dataset for the configured day.
     *
     * @param spark     active Spark session
     * @param configMap job configuration; must contain {@code run.env}, the day
     *                  value ({@link ICommonService#DAY_VALUE}) and
     *                  {@code hdfs.0200.hql} (an HQL template with one {@code %s}
     *                  placeholder for the day); local runs additionally require
     *                  {@code hdfs.0200.path}
     * @return a single-element list holding the 0200 {@link Dataset}
     * @throws IllegalStateException if a required configuration key is missing
     *                               or the HQL template is malformed
     */
    @Override
    public List<Object> loadData(SparkSession spark, Map<String, String> configMap) {
        String env = configMap.get("run.env");
        String day = configMap.get(ICommonService.DAY_VALUE);
        String hqlTemplate = configMap.get("hdfs.0200.hql");
        if (hqlTemplate == null) {
            throw new IllegalStateException("missing configuration key: hdfs.0200.hql");
        }
        String hql0200 = String.format(hqlTemplate, day);

        Dataset<Row> locationDataset;
        if (StringUtils.equals(env, "local")) {
            // Local runs: load the raw files from HDFS, register them as a temp
            // view named after the table referenced in the HQL, then select only
            // the 0200 columns through Spark SQL.
            String pathTemplate = configMap.get("hdfs.0200.path");
            if (pathTemplate == null) {
                throw new IllegalStateException("missing configuration key: hdfs.0200.path");
            }
            Dataset<Row> allFields = hdfsChannel.readData(spark, String.format(pathTemplate, day));
            allFields.createOrReplaceTempView(extractTableName(hql0200));
            // Drop the WHERE clause: the day filter was already applied via the
            // file path, and the temp view only contains that day's data.
            locationDataset = spark.sql(extractSelectClause(hql0200));
        } else {
            locationDataset = hiveChannel.readData(spark, hql0200);
        }
        return Collections.singletonList(locationDataset);
    }

    /**
     * Extracts the table name located between the (lowercase) {@code from} and
     * {@code where} keywords of the HQL.
     *
     * @throws IllegalStateException if either keyword is absent or out of order,
     *                               instead of the opaque index error the naive
     *                               {@code substring} arithmetic would produce
     */
    private static String extractTableName(String hql) {
        int fromIdx = hql.indexOf("from");
        int whereIdx = hql.indexOf("where");
        if (fromIdx < 0 || whereIdx < 0 || whereIdx < fromIdx) {
            throw new IllegalStateException(
                    "malformed 0200 HQL, expected 'from <table> where ...': " + hql);
        }
        return hql.substring(fromIdx + 4, whereIdx).trim();
    }

    /**
     * Returns the HQL with its {@code where} clause (and everything after it)
     * removed.
     *
     * @throws IllegalStateException if no {@code where} keyword is present
     */
    private static String extractSelectClause(String hql) {
        int whereIdx = hql.indexOf("where");
        if (whereIdx < 0) {
            throw new IllegalStateException("malformed 0200 HQL, no 'where' clause: " + hql);
        }
        return hql.substring(0, whereIdx);
    }
}
