package com.navinfo.platform.dataanalysis.service.impl;

import com.mongodb.spark.rdd.api.java.JavaMongoRDD;
import com.navinfo.platform.common.service.ICommonService;
import com.navinfo.platform.common.service.ILoadDataChannel;
import com.navinfo.platform.common.service.ILoadDataService;
import com.navinfo.platform.common.service.impl.LoadDataFromHdfs;
import com.navinfo.platform.common.service.impl.LoadDataFromHive;
import com.navinfo.platform.common.service.impl.LoadDataFromMongo;
import com.navinfo.platform.common.utils.DateUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.bson.Document;

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/***
 * @author gx
 */
public enum AnalysisLoadDataService implements ILoadDataService {
    /**
     * Singleton instance (enum-based singleton pattern).
     */
    INSTANCE;

    private final ILoadDataChannel hiveChannel = LoadDataFromHive.INSTANCE;
    private final ILoadDataChannel hdfsChannel = LoadDataFromHdfs.INSTANCE;
    // Only referenced by the commented-out mongo loading below; kept so that
    // re-enabling that code is a one-line change.
    private final ILoadDataChannel mongoChannel = LoadDataFromMongo.INSTANCE;

    /**
     * Loads the three analysis datasets: 0200 (location), 0f37 (realtime) and tile.
     *
     * <p>When {@code run.env} is {@code "local"}, raw files are read from the
     * configured HDFS paths, registered as temp views, and the configured HQL's
     * column projection is re-applied (minus the partition {@code where} clause,
     * which does not apply to raw files). Otherwise the HQL statements are run
     * directly against Hive.
     *
     * @param spark     active Spark session
     * @param configMap configuration; must contain {@code run.env},
     *                  {@link ICommonService#DAY_VALUE}, {@code hdfs.0200.hql},
     *                  {@code hdfs.0f37.hql} and {@code tile.hql}; local mode
     *                  additionally requires the matching {@code *.path} keys
     * @return list of exactly three {@code Dataset<Row>}: location, realtime, tile
     */
    @Override
    public List<Object> loadData(SparkSession spark, Map<String, String> configMap) {
        String env = configMap.get("run.env");
        String day = configMap.get(ICommonService.DAY_VALUE);
        // The configured HQL templates carry a %s placeholder for the day partition.
        String hql0200 = String.format(configMap.get("hdfs.0200.hql"), day);
        String hql0f37 = String.format(configMap.get("hdfs.0f37.hql"), day);
        String hqlTile = configMap.get("tile.hql");

        Dataset<Row> locationDataset;
        Dataset<Row> realtimeDataset;
        Dataset<Row> tileDataset;
        if (StringUtils.equals(env, "local")) {
            // Select the 0200 columns.
            locationDataset = projectLocalDataset(spark, configMap.get("hdfs.0200.path"), hql0200, day);
            // Select the 0f37 columns.
            realtimeDataset = projectLocalDataset(spark, configMap.get("hdfs.0f37.path"), hql0f37, day);

            // Select the tile columns. The tile HQL has no "where" clause, so the
            // whole statement is run as-is instead of going through the helper.
            String tilePath = configMap.get("tile.path");
            Dataset<Row> tempTileDataset = hdfsChannel.readData(spark, String.format(tilePath, day));
            // NOTE(review): assumes the configured HQL uses lowercase "from" — confirm
            // against the config files before changing the statements.
            String tableNameTile = hqlTile.substring(hqlTile.indexOf("from") + 4).trim();
            tempTileDataset.createOrReplaceTempView(tableNameTile);
            tileDataset = spark.sql(hqlTile);
        } else {
            // Non-local environments execute the configured HQL directly on Hive.
            locationDataset = hiveChannel.readData(spark, hql0200);
            realtimeDataset = hiveChannel.readData(spark, hql0f37);
            tileDataset = hiveChannel.readData(spark, hqlTile);
        }

        //ready mongo param
//        Map<String, String> mongoConfig = getMongoMap(configMap, day);
//        JavaMongoRDD<Document> javaMongoRDD = mongoChannel.readData(spark, mongoConfig);
        return Arrays.asList(locationDataset, realtimeDataset, tileDataset);
    }

    /**
     * Local-mode loading for HQL of the shape "select ... from t where ...":
     * reads the day-formatted path from HDFS, registers it as the temp view named
     * in the HQL's from-clause, and runs the projection without the where-clause.
     *
     * @param spark        active Spark session
     * @param pathTemplate HDFS path template with a %s placeholder for the day
     * @param hql          full HQL containing lowercase "from" and "where" tokens
     * @param day          partition day value substituted into the path
     * @return the projected dataset
     */
    private Dataset<Row> projectLocalDataset(SparkSession spark, String pathTemplate, String hql, String day) {
        Dataset<Row> raw = hdfsChannel.readData(spark, String.format(pathTemplate, day));
        // NOTE(review): assumes lowercase "from"/"where" in the configured HQL;
        // indexOf returns -1 otherwise and substring would throw — confirm configs.
        String tableName = hql.substring(hql.indexOf("from") + 4, hql.indexOf("where")).trim();
        raw.createOrReplaceTempView(tableName);
        return spark.sql(hql.substring(0, hql.indexOf("where")));
    }

    /**
     * Builds the parameter map used by the mongo channel (currently only called
     * from the commented-out block in {@link #loadData}).
     *
     * @param configMap job configuration
     * @param day       day value in yyyyMMdd form; its yyyyMM prefix becomes the
     *                  collection suffix — assumes length >= 6, TODO confirm
     * @return mongo read parameters; condition/projection included only when configured
     */
    private Map<String, String> getMongoMap(Map<String, String> configMap, String day) {
        String mongoCollectionName = configMap.get("mongo.collection.name");
        // NOTE(review): "contidion" looks like a typo for "condition", but the key
        // must match the external config file — verify before renaming either side.
        String condition = configMap.get("mongo.contidion");
        String projection = configMap.get("mongo.projection");
        String date = DateUtils.format(day, "yyyyMMdd", "yyyy-MM-dd");
        Map<String, String> mongoConfig = new HashMap<>();
        mongoConfig.put("mongoCollectionName", mongoCollectionName);
        // Monthly collections: suffix is "_yyyyMM".
        mongoConfig.put("collectionSuffix", "_" + day.substring(0, 6));
        if (condition != null) {
            mongoConfig.put("condition", String.format(condition, date));
        }
        if (projection != null) {
            mongoConfig.put("projection", projection);
        }
        return mongoConfig;
    }
}
